Compare commits
322 Commits
SHA1:
4771f736b0, 8d7436579e, 43e1df98b1, 3017030f52, 348a0b8226, 4e60c08120, 10b58379cd, fa9201b20f,
86f72997b4, 62d0319aad, a096a09eb7, 7294241fed, a5995d3999, 30f31b2f3f, bc09c873e9, 8428be9dda,
6a45ad18f9, 023b638729, 7e13c14636, 0c218fa9dd, b3b0ccac73, fa0134551f, 34d62c9021, 920bbbb3be,
762e5f10da, 27797cb361, 066f9d1f66, acf7c2cdf2, 469857a31a, 405e5decf2, da9abf8a24, 128c6a1c76,
99f32c10ef, 44e42f99db, b1639eab0f, 679d3535b1, a795d4bba5, 0a025cf5fa, 6a1ceb4db3, 99c339c405,
bd75d865ba, d30e03b702, 3b749faefb, 74f29d44b3, a397ceb54e, 502b3616df, 1a7178e32d, 7119956ec7,
24035e217e, 21ffce674f, 4185f5fc94, 3fdcc99304, 0fa2a698ac, 2b871c58ed, 5078243938, b67e6d7257,
632dfbcadb, 1f7da2f609, b3175305bd, 5b1933cb08, ae71ca0940, 9b38729b95, 402c286742, dcd4ac1d36,
e6fbf746d8, 803d248cb8, 713be6970c, 62509f16db, 84899aef50, 86b30e1887, 033ef76cfe, 815596379c,
bc5e8bc65d, b32de7259b, 29cb0cebd5, 6744ebcb5b, c6b026a82d, cc706938ce, 84b98db36b, 01ad15e2bd,
54adf0e56f, 025e40b098, 4534854001, 362d54b471, fa533ff65e, 96fe367562, 9566ffa384, f5835e1e72,
fe65716706, 873735900f, 28c166146e, c6affc3108, 59b49fd0df, 6ec003f899, 95f2a9ad30, 7317e8533d,
f80c20d70c, 01fc13c3e0, 1edfecae42, 9dab9a4632, 98a824bfdc, 8fa9351ef1, 64b2197844, 26f1f80be7,
2e4c2a6817, 9498335e22, 26f40110d0, 9375d482b0, de9180a124, ba480d2cb7, 5014748ee1, 4e25ae5539,
a2b802a5de, 9739b27718, bdf6476689, b8b627be1a, 03cad668aa, 2e0ec9aa38, 4b02aae889, c91ba0b1b3,
6b7f8b04e6, 5472baab51, d5b6859bf8, 8e23847c79, 0c1579bae7, 3dc50fff95, b8273f50c2, dbea28e9c6,
a1b11ab039, 1841b0dea6, b311bf2770, df13cef760, 76ac264b25, 93bc94add5, 79325b8c61, 58c1eb7004,
466f292feb, 4de25a0d4a, 43d6bc0d82, b53d7c9ecc, 116d50a75a, e1fb365096, 03617dacfc, e6b33f1bc9,
d9e6a554f6, 7ef37fe848, dd64617cbd, a227c01a7f, da53609385, e94a8fb2c3, d87542ab78, 945ffb2fb3,
da808ba25e, e4b4c7ba39, 43ead4bb0f, c4a2f8af39, fd281a50b6, f8ef4fe6c9, faef0ae246, 117932e272,
4297708d3e, 2d00d8cb3e, 0190788658, b46d4a7166, 994d529f59, 7f347ae186, 4a4c9f9ccf, cd4ce186ca,
ca5403f97b, 7bb76f62a5, dcdbe44648, f91005f0ba, 5baea978ab, 9cc1f96eea, 2a65c4b5ca, e1ef6615cc,
d607c9c821, f6b2186824, a3fc53059c, 6afe123947, 7e9c6bb338, 89a2768fc9, 9ff3bdf302, 2c48e89435,
6ccb68aeb3, 032243de0a, 5b1b9ec222, 375c6b23a5, b378dff0dc, 23f2d287d6, 3cd73e54a1, bf5d875079,
aef884523b, 2a59f38faf, 3a0e60c772, fb657d41db, ac7f389563, 237e82eceb, 8fab4304a4, 971c9e9147,
cacbca5a07, e4dbd8adfb, 19f77c89e7, ce1b922f71, 81e967864d, 29f55104bc, ff2e2deb20, 3fa55cb41b,
aa5d97a0d5, 89c4f15ae8, 1351bfc1cf, 32cc76f043, 968cb2091d, 8986c1037b, 87b4a950f1, 9f61c7338d,
ffc81dad69, 7eba46b5cb, aa177666a5, 7de7b42fd2, 03d1fd8019, 485ec5fe2d, 59bfbd9811, 1e485b37f8,
4adf94f24b, 1a0fab36d3, a1ef3b5a8d, 0c47ee7119, e51cf6e482, 24d5de813d, 9921b2a355, faa378c75e,
26199100dc, bee854f059, 73c1ebf3ed, 397dd17429, 73d18dad92, 94d63a3fdb, 120d89e8be, eb5534c61c,
99d217e8f1, f7e3f8ae2a, 2cebd2d73d, 18df322c41, fc80f50560, 4870ff9e7a, 58337bd38c, e6d1e0933a,
68e3f25ba4, c981426db6, 916b16b12c, a178d230de, fffb04728a, 658361383a, 3330026de6, 25fe426720,
8d53b5b8c0, 0774735f0f, a5540b94d5, c69d59858d, 962b1149ad, a8edee0354, 3627dde64d, 80ec0e42c2,
fd64088bde, 7832e41a3b, cadd65d69b, 9f004797fc, 9041fee7ad, 9af6c9057c, 757ce91138, 98f96ed5c7,
3e15120e2a, 5824e6c0cc, 7087d75efb, b2a06b8fd3, 1ee055faf5, 404089dfca, ed24948dee, 1f16fcb8eb,
03de3affd7, 2bad887659, 796be47a63, dc1fe11590, 13b4ff3215, dca54cf4fb, a4365e59f3, b960bccb86,
218037200c, 4ac670e837, 96eff65c3e, 45471607c8, 14885744b1, d1fd61f1d1, 6c1ee4a7dc, 9a8e799e68,
ffec4e2f28, 00102891a5, aa76b83428, a644189ea5, c5d987a8c9, bdd68ad68d, 2d86bca781, 1b01ff6ec2,
f08ec43507, 7f9424260a, 5f21943353, a5a795f43c, fcb6d7625f, fd2179998f, 654e7f20e1, 1eb92cf7c1,
111e284cc8, 1a4e3f4ec4, 4066228e57, 59c6d278e3, 39b33f3d43, e8faf52b2b, 370e9bac63, d0746cec5a,
251937431b, 50faf061af
@@ -1 +1,15 @@
-.env
+.dockerignore
+.env
+.env.sample
+.git
+.github
+.gitignore
+.idea
+.vscode
+
+Dockerfile*
+README.md
+
+test
+
+venv
159  .env.sample
@@ -1,43 +1,116 @@
|
||||
## Do not mark any shows/movies as played and instead just output to log if they would have been marked.
|
||||
DRYRUN = "True"
|
||||
## Additional logging information
|
||||
DEBUG = "True"
|
||||
## Debugging level, "info" is default, "debug" is more verbose
|
||||
DEBUG_LEVEL = "info"
|
||||
## How often to run the script in seconds
|
||||
SLEEP_DURATION = "3600"
|
||||
## Log file where all output will be written to
|
||||
LOGFILE = "log.log"
|
||||
## Map usernames between plex and jellyfin in the event that they are different, order does not matter
|
||||
#USER_MAPPING = { "testuser2": "testuser3" }
|
||||
## Map libraries between plex and jellyfin in the event that they are different, order does not matter
|
||||
#LIBRARY_MAPPING = { "Shows": "TV Shows" }
|
||||
|
||||
|
||||
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
||||
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma separated list for multiple servers
|
||||
PLEX_BASEURL = "http://localhost:32400"
|
||||
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
||||
PLEX_TOKEN = "SuperSecretToken"
|
||||
## If not using plex token then use username and password of the server admin along with the servername
|
||||
#PLEX_USERNAME = ""
|
||||
#PLEX_PASSWORD = ""
|
||||
#PLEX_SERVERNAME = "Plex Server"
|
||||
## Skip hostname validation for ssl certificates.
|
||||
SSL_BYPASS = "False"
|
||||
|
||||
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma separated list for multiple servers
|
||||
JELLYFIN_BASEURL = "http://localhost:8096"
|
||||
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
||||
JELLYFIN_TOKEN = "SuperSecretToken"
|
||||
|
||||
|
||||
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply, so if the mapping for the user or library exists then both will be excluded.
|
||||
#BLACKLIST_LIBRARY = ""
|
||||
#WHITELIST_LIBRARY = ""
|
||||
#BLACKLIST_LIBRARY_TYPE = ""
|
||||
#WHITELIST_LIBRARY_TYPE = ""
|
||||
#BLACKLIST_USERS = ""
|
||||
WHITELIST_USERS = "testuser1,testuser2"
|
||||
# Global Settings
|
||||
|
||||
## Do not mark any shows/movies as played and instead just output to log if they would have been marked.
|
||||
DRYRUN = "True"
|
||||
|
||||
## Additional logging information
|
||||
DEBUG = "False"
|
||||
|
||||
## Debugging level, "info" is default, "debug" is more verbose
|
||||
DEBUG_LEVEL = "info"
|
||||
|
||||
## If set to true then the script will only run once and then exit
|
||||
RUN_ONLY_ONCE = "False"
|
||||
|
||||
## How often to run the script in seconds
|
||||
SLEEP_DURATION = "3600"
|
||||
|
||||
## Log file where all output will be written to
|
||||
LOGFILE = "log.log"
|
||||
|
||||
## Mark file where all shows/movies that have been marked as played will be written to
|
||||
MARK_FILE = "mark.log"
|
||||
|
||||
## Timeout for requests for jellyfin
|
||||
REQUEST_TIMEOUT = 300
|
||||
|
||||
## Generate guids
|
||||
## Generating guids is a slow process, so this is a way to speed up the process
|
||||
## by using the location only, useful when using same files on multiple servers
|
||||
GENERATE_GUIDS = "True"
|
||||
|
||||
## Generate locations
|
||||
## Generating locations is a slow process, so this is a way to speed up the process
|
||||
## by using the guid only, useful when using different files on multiple servers
|
||||
GENERATE_LOCATIONS = "True"
|
||||
|
||||
## Max threads for processing
|
||||
MAX_THREADS = 2
|
||||
|
||||
## Map usernames between servers in the event that they are different, order does not matter
|
||||
## Comma separated for multiple options
|
||||
#USER_MAPPING = { "testuser2": "testuser3", "testuser1":"testuser4" }
|
||||
|
||||
## Map libraries between servers in the event that they are different, order does not matter
|
||||
## Comma separated for multiple options
|
||||
#LIBRARY_MAPPING = { "Shows": "TV Shows", "Movie": "Movies" }
|
||||
|
||||
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply, so if the mapping for the user or library exists then both will be excluded.
|
||||
## Comma separated for multiple options
|
||||
#BLACKLIST_LIBRARY = ""
|
||||
#WHITELIST_LIBRARY = ""
|
||||
#BLACKLIST_LIBRARY_TYPE = ""
|
||||
#WHITELIST_LIBRARY_TYPE = ""
|
||||
#BLACKLIST_USERS = ""
|
||||
WHITELIST_USERS = "testuser1,testuser2"
|
||||
|
||||
|
||||
# Plex
|
||||
|
||||
## Recommended to use a token, as it connects directly to the server instead of going through the plex servers and is therefore faster
|
||||
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma separated list for multiple servers
|
||||
PLEX_BASEURL = "http://localhost:32400, https://nas:32400"
|
||||
|
||||
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
||||
## Comma separated list for multiple servers
|
||||
PLEX_TOKEN = "SuperSecretToken, SuperSecretToken2"
|
||||
|
||||
## If not using plex token then use username and password of the server admin along with the servername
|
||||
## Comma separated for multiple options
|
||||
#PLEX_USERNAME = "PlexUser, PlexUser2"
|
||||
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
|
||||
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"
|
||||
|
||||
## Skip hostname validation for ssl certificates.
|
||||
## Set to True if running into ssl certificate errors
|
||||
SSL_BYPASS = "False"
|
||||
|
||||
|
||||
# Jellyfin
|
||||
|
||||
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma separated list for multiple servers
|
||||
JELLYFIN_BASEURL = "http://localhost:8096, http://nas:8096"
|
||||
|
||||
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
||||
## Comma separated list for multiple servers
|
||||
JELLYFIN_TOKEN = "SuperSecretToken, SuperSecretToken2"
|
||||
|
||||
|
||||
# Emby
|
||||
|
||||
## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma separated list for multiple servers
|
||||
EMBY_BASEURL = "http://localhost:8097"
|
||||
|
||||
## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
|
||||
## Comma separated list for multiple servers
|
||||
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"
|
||||
|
||||
|
||||
# Syncing Options
|
||||
|
||||
## Control the direction of syncing, e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause updates from plex
## to be applied to jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
||||
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_PLEX_TO_PLEX = "True"
|
||||
SYNC_FROM_PLEX_TO_EMBY = "True"
|
||||
|
||||
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_EMBY = "True"
|
||||
|
||||
SYNC_FROM_EMBY_TO_PLEX = "True"
|
||||
SYNC_FROM_EMBY_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_EMBY_TO_EMBY = "True"
|
||||
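For example, a one-way Plex-to-Jellyfin setup would keep only the first flag above enabled (a minimal sketch; every other SYNC_FROM_* flag is set to "False"):

SYNC_FROM_PLEX_TO_JELLYFIN = "True"
SYNC_FROM_PLEX_TO_PLEX = "False"
SYNC_FROM_PLEX_TO_EMBY = "False"
SYNC_FROM_JELLYFIN_TO_PLEX = "False"
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "False"
SYNC_FROM_JELLYFIN_TO_EMBY = "False"
SYNC_FROM_EMBY_TO_PLEX = "False"
SYNC_FROM_EMBY_TO_JELLYFIN = "False"
SYNC_FROM_EMBY_TO_EMBY = "False"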
15  .github/FUNDING.yml  vendored  Normal file
@@ -0,0 +1,15 @@
+# These are supported funding model platforms
+
+github: [Luigi311]
+patreon: # Replace with a single Patreon username
+open_collective: # Replace with a single Open Collective username
+ko_fi: # Replace with a single Ko-fi username
+tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
+community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
+liberapay: # Replace with a single Liberapay username
+issuehunt: # Replace with a single IssueHunt username
+lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
+polar: # Replace with a single Polar username
+buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
+thanks_dev: # Replace with a single thanks.dev username
+custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
33  .github/ISSUE_TEMPLATE/bug_report.md  vendored  Normal file
@@ -0,0 +1,33 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
title: "[BUG]"
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**To Reproduce**
|
||||
Steps to reproduce the behavior:
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See error
|
||||
|
||||
**Expected behavior**
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
**Logs**
|
||||
If applicable, add logs to help explain your problem, ideally with DEBUG set to true. Be sure to remove sensitive information.
|
||||
|
||||
**Type:**
|
||||
- [ ] Docker Compose
|
||||
- [ ] Docker
|
||||
- [ ] Unraid
|
||||
- [ ] Native
|
||||
|
||||
**Additional context**
|
||||
Add any other context about the problem here.
|
||||
20  .github/ISSUE_TEMPLATE/feature_request.md  vendored  Normal file
@@ -0,0 +1,20 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
title: "[Feature Request]"
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Is your feature request related to a problem? Please describe.**
|
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||
|
||||
**Describe the solution you'd like**
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
**Describe alternatives you've considered**
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
||||
188  .github/workflows/ci.yml  vendored
@@ -1,5 +1,6 @@
|
||||
name: CI
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
paths-ignore:
|
||||
- .gitignore
|
||||
@@ -8,12 +9,19 @@ on:
|
||||
paths-ignore:
|
||||
- .gitignore
|
||||
- "*.md"
|
||||
|
||||
|
||||
env:
|
||||
PYTHON_VERSION: '3.13'
|
||||
|
||||
jobs:
|
||||
pytest:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- name: "Install dependencies"
|
||||
run: pip install -r requirements.txt && pip install -r test/requirements.txt
|
||||
@@ -21,60 +29,178 @@ jobs:
|
||||
- name: "Run tests"
|
||||
run: pytest -vvv
|
||||
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- name: "Install dependencies"
|
||||
run: |
|
||||
pip install -r requirements.txt
|
||||
sudo apt update && sudo apt install -y docker-compose
|
||||
|
||||
- name: "Checkout JellyPlex-Watched-CI"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: luigi311/JellyPlex-Watched-CI
|
||||
path: JellyPlex-Watched-CI
|
||||
|
||||
- name: "Start containers"
|
||||
run: |
|
||||
JellyPlex-Watched-CI/start_containers.sh
|
||||
|
||||
# Wait for containers to start
|
||||
sleep 10
|
||||
|
||||
for FOLDER in $(find "JellyPlex-Watched-CI" -type f -name "docker-compose.yml" -exec dirname {} \;); do
|
||||
docker compose -f "${FOLDER}/docker-compose.yml" logs
|
||||
done
|
||||
|
||||
- name: "Test Plex"
|
||||
run: |
|
||||
mv test/ci_plex.env .env
|
||||
python main.py
|
||||
python test/validate_ci_marklog.py --plex
|
||||
|
||||
rm mark.log
|
||||
|
||||
- name: "Test Jellyfin"
|
||||
run: |
|
||||
mv test/ci_jellyfin.env .env
|
||||
python main.py
|
||||
python test/validate_ci_marklog.py --jellyfin
|
||||
|
||||
rm mark.log
|
||||
|
||||
- name: "Test Emby"
|
||||
run: |
|
||||
mv test/ci_emby.env .env
|
||||
python main.py
|
||||
python test/validate_ci_marklog.py --emby
|
||||
|
||||
rm mark.log
|
||||
|
||||
- name: "Test Guids"
|
||||
run: |
|
||||
mv test/ci_guids.env .env
|
||||
python main.py
|
||||
python test/validate_ci_marklog.py --guids
|
||||
|
||||
rm mark.log
|
||||
|
||||
- name: "Test Locations"
|
||||
run: |
|
||||
mv test/ci_locations.env .env
|
||||
python main.py
|
||||
python test/validate_ci_marklog.py --locations
|
||||
|
||||
rm mark.log
|
||||
|
||||
- name: "Test writing to the servers"
|
||||
run: |
|
||||
# Test writing to the servers
|
||||
mv test/ci_write.env .env
|
||||
python main.py
|
||||
|
||||
# Test again to test if it can handle existing data
|
||||
python main.py
|
||||
|
||||
python test/validate_ci_marklog.py --write
|
||||
|
||||
rm mark.log
|
||||
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
needs: pytest
|
||||
needs:
|
||||
- pytest
|
||||
- test
|
||||
env:
|
||||
DEFAULT_VARIANT: alpine
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- dockerfile: Dockerfile.alpine
|
||||
variant: alpine
|
||||
- dockerfile: Dockerfile.slim
|
||||
variant: slim
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Docker meta
|
||||
id: docker_meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
${{ secrets.DOCKER_USERNAME }}/jellyplex-watched,enable=${{ secrets.DOCKER_USERNAME != '' }}
|
||||
# Do not push to ghcr.io on PRs due to permission issues, only push if the owner is luigi311 so it doesn't fail on forks
|
||||
ghcr.io/${{ github.repository }},enable=${{ github.event_name != 'pull_request' && github.repository_owner == 'luigi311'}}
|
||||
flavor: latest=false
|
||||
tags: |
|
||||
type=raw,value=latest,enable=${{ matrix.variant == env.DEFAULT_VARIANT && startsWith(github.ref, 'refs/tags/') }}
|
||||
type=raw,value=latest,suffix=-${{ matrix.variant }},enable=${{ startsWith(github.ref, 'refs/tags/') }}
|
||||
|
||||
type=ref,event=branch,suffix=-${{ matrix.variant }}
|
||||
type=ref,event=branch,enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
||||
|
||||
type=ref,event=pr,suffix=-${{ matrix.variant }}
|
||||
type=ref,event=pr,enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
||||
|
||||
type=semver,pattern={{ version }},suffix=-${{ matrix.variant }}
|
||||
type=semver,pattern={{ version }},enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
||||
|
||||
type=semver,pattern={{ major }}.{{ minor }},suffix=-${{ matrix.variant }}
|
||||
type=semver,pattern={{ major }}.{{ minor }},enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
||||
|
||||
type=sha,suffix=-${{ matrix.variant }}
|
||||
type=sha,enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to DockerHub
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
if: "${{ env.DOCKER_USERNAME != '' }}"
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: ${{ secrets.DOCKER_USERNAME }}/jellyplex-watched # list of Docker images to use as base name for tags
|
||||
tags: |
|
||||
type=raw,value=latest,enable={{is_default_branch}}
|
||||
type=ref,event=branch
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=sha
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
|
||||
- name: Login to DockerHub
|
||||
if: "${{ steps.docker_meta.outcome == 'success' }}"
|
||||
uses: docker/login-action@v1
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_TOKEN }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
if: "${{ steps.docker_meta.outcome == 'success' }}"
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build
|
||||
id: build
|
||||
if: "${{ steps.docker_meta.outcome == 'skipped' }}"
|
||||
uses: docker/build-push-action@v2
|
||||
if: "${{ steps.docker_meta.outputs.tags == '' }}"
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile
|
||||
file: ${{ matrix.dockerfile }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: false
|
||||
tags: jellyplex-watched:action
|
||||
|
||||
|
||||
- name: Build Push
|
||||
id: build_push
|
||||
if: "${{ steps.docker_meta.outcome == 'success' }}"
|
||||
uses: docker/build-push-action@v2
|
||||
if: "${{ steps.docker_meta.outputs.tags != '' }}"
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile
|
||||
file: ${{ matrix.dockerfile }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.docker_meta.outputs.tags }}
|
||||
|
||||
2  .gitignore  vendored
@@ -1,4 +1,4 @@
-.env
+**.env*
 *.prof
 
 # Byte-compiled / optimized / DLL files
11  .vscode/launch.json  vendored
@@ -11,6 +11,17 @@
|
||||
"program": "main.py",
|
||||
"console": "integratedTerminal",
|
||||
"justMyCode": true
|
||||
},
|
||||
{
|
||||
"name": "Pytest",
|
||||
"type": "python",
|
||||
"request": "launch",
|
||||
"module": "pytest",
|
||||
"args": [
|
||||
"-vv"
|
||||
],
|
||||
"console": "integratedTerminal",
|
||||
"justMyCode": true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
7  .vscode/settings.json  vendored  Normal file
@@ -0,0 +1,7 @@
+{
+    "[python]" : {
+        "editor.formatOnSave": true,
+    },
+    "python.formatting.provider": "black",
+
+}
41  Dockerfile
@@ -1,41 +0,0 @@
|
||||
FROM python:3-slim
|
||||
|
||||
ENV DRYRUN 'True'
|
||||
ENV DEBUG 'True'
|
||||
ENV DEBUG_LEVEL 'INFO'
|
||||
ENV SLEEP_DURATION '3600'
|
||||
ENV LOGFILE 'log.log'
|
||||
|
||||
ENV USER_MAPPING '{ "User Test": "User Test2" }'
|
||||
ENV LIBRARY_MAPPING '{ "Shows Test": "TV Shows Test" }'
|
||||
|
||||
ENV PLEX_BASEURL 'http://localhost:32400'
|
||||
ENV PLEX_TOKEN ''
|
||||
ENV PLEX_USERNAME ''
|
||||
ENV PLEX_PASSWORD ''
|
||||
ENV PLEX_SERVERNAME ''
|
||||
|
||||
ENV JELLYFIN_BASEURL 'http://localhost:8096'
|
||||
ENV JELLYFIN_TOKEN ''
|
||||
|
||||
ENV BLACKLIST_LIBRARY ''
|
||||
ENV WHITELIST_LIBRARY ''
|
||||
ENV BLACKLIST_LIBRARY_TYPE ''
|
||||
ENV WHITELIST_LIBRARY_TYPE ''
|
||||
ENV BLACKLIST_USERS ''
|
||||
ENV WHITELIST_USERS ''
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y --no-install-recommends \
|
||||
build-essential && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY ./requirements.txt ./
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
COPY . .
|
||||
|
||||
CMD ["python", "-u", "main.py"]
|
||||
49  Dockerfile.alpine  Normal file
@@ -0,0 +1,49 @@
|
||||
FROM python:3.13-alpine
|
||||
|
||||
ENV PUID=1000
|
||||
ENV PGID=1000
|
||||
ENV GOSU_VERSION=1.17
|
||||
|
||||
RUN apk add --no-cache tini dos2unix
|
||||
|
||||
# Install gosu
|
||||
RUN set -eux; \
|
||||
\
|
||||
apk add --no-cache --virtual .gosu-deps \
|
||||
ca-certificates \
|
||||
dpkg \
|
||||
gnupg \
|
||||
; \
|
||||
\
|
||||
dpkgArch="$(dpkg --print-architecture | awk -F- '{ print $NF }')"; \
|
||||
wget -O /usr/local/bin/gosu "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$dpkgArch"; \
|
||||
wget -O /usr/local/bin/gosu.asc "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$dpkgArch.asc"; \
|
||||
\
|
||||
# verify the signature
|
||||
export GNUPGHOME="$(mktemp -d)"; \
|
||||
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys B42F6819007F00F88E364FD4036A9C25BF357DD4; \
|
||||
gpg --batch --verify /usr/local/bin/gosu.asc /usr/local/bin/gosu; \
|
||||
gpgconf --kill all; \
|
||||
rm -rf "$GNUPGHOME" /usr/local/bin/gosu.asc; \
|
||||
\
|
||||
# clean up fetch dependencies
|
||||
apk del --no-network .gosu-deps; \
|
||||
\
|
||||
chmod +x /usr/local/bin/gosu; \
|
||||
# verify that the binary works
|
||||
gosu --version; \
|
||||
gosu nobody true
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY ./requirements.txt ./
|
||||
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN chmod +x *.sh && \
|
||||
dos2unix *.sh
|
||||
|
||||
ENTRYPOINT ["tini", "--", "/app/entrypoint.sh"]
|
||||
CMD ["python", "-u", "main.py"]
|
||||
23  Dockerfile.slim  Normal file
@@ -0,0 +1,23 @@
+FROM python:3.13-slim
+
+ENV PUID=1000
+ENV PGID=1000
+
+RUN apt-get update && \
+    apt-get install tini gosu dos2unix --yes --no-install-recommends && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+
+COPY ./requirements.txt ./
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY . .
+
+RUN chmod +x *.sh && \
+    dos2unix *.sh
+
+ENTRYPOINT ["/bin/tini", "--", "/app/entrypoint.sh"]
+CMD ["python", "-u", "main.py"]
187  README.md
@@ -1,73 +1,114 @@
|
||||
# JellyPlex-Watched
|
||||
|
||||
[](https://www.codacy.com/gh/luigi311/JellyPlex-Watched/dashboard?utm_source=github.com&utm_medium=referral&utm_content=luigi311/JellyPlex-Watched&utm_campaign=Badge_Grade)
|
||||
|
||||
Sync watched between jellyfin and plex
|
||||
|
||||
## Description
|
||||
|
||||
Keep in sync all your users watched history between jellyfin and plex servers locally. This uses the imdb ids and any other matching id to find the correct episode/movie between the two. This is not perfect but it works for most cases. You can use this for as many servers as you want by enterying multiple options in the .env plex/jellyfin section seperated by commas.
|
||||
|
||||
## Configuration
|
||||
|
||||
|
||||
|
||||
## Installation
|
||||
|
||||
### Baremetal
|
||||
|
||||
- Setup virtualenv of your choice
|
||||
|
||||
- Install dependencies
|
||||
|
||||
```bash
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
|
||||
- Create a .env file similar to .env.sample, uncomment whitelist and blacklist if needed, fill in baseurls and tokens
|
||||
|
||||
- Run
|
||||
|
||||
```bash
|
||||
python main.py
|
||||
```
|
||||
|
||||
### Docker
|
||||
|
||||
- Build docker image
|
||||
|
||||
```bash
|
||||
docker build -t jellyplex-watched .
|
||||
```
|
||||
|
||||
- or use pre-built image
|
||||
|
||||
```bash
|
||||
docker pull luigi311/jellyplex-watched:latest
|
||||
```
|
||||
|
||||
#### With variables
|
||||
|
||||
- Run
|
||||
|
||||
```bash
|
||||
docker run --rm -it -e PLEX_TOKEN='SuperSecretToken' luigi311/jellyplex-watched:latest
|
||||
```
|
||||
|
||||
#### With .env
|
||||
|
||||
- Create a .env file similar to .env.sample and set the variables to match your setup
|
||||
|
||||
- Run
|
||||
|
||||
```bash
|
||||
docker run --rm -it -v "$(pwd)/.env:/app/.env" luigi311/jellyplex-watched:latest
|
||||
```
|
||||
|
||||
## Contributing
|
||||
|
||||
I am open to recieving pull requests. If you are submitting a pull request, please make sure run it locally for a day or two to make sure it is working as expected and stable. Make all pull requests against the dev branch and nothing will be merged into the main without going through the lower branches.
|
||||
|
||||
## License
|
||||
|
||||
This is currently under the GNU General Public License v3.0.
|
||||
# JellyPlex-Watched
|
||||
|
||||
[](https://www.codacy.com/gh/luigi311/JellyPlex-Watched/dashboard?utm_source=github.com&utm_medium=referral&utm_content=luigi311/JellyPlex-Watched&utm_campaign=Badge_Grade)
|
||||
|
||||
Sync watched between jellyfin, plex and emby locally
|
||||
|
||||
## Description
|
||||
|
||||
Keep all your users' watched history in sync between jellyfin, plex and emby servers locally. This uses file names and provider ids to find the correct episode/movie across servers. This is not perfect but it works for most cases. You can use this for as many servers as you want by entering multiple options in the .env plex/jellyfin section separated by commas.
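For example, a minimal .env entry for two Plex servers and one Jellyfin server might look like this (placeholder values taken from .env.sample, which lists all options):

```
PLEX_BASEURL = "http://localhost:32400, https://nas:32400"
PLEX_TOKEN = "SuperSecretToken, SuperSecretToken2"
JELLYFIN_BASEURL = "http://localhost:8096"
JELLYFIN_TOKEN = "SuperSecretToken"
```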
|
||||
|
||||
## Features

### Plex

- [x] Match via filenames
- [x] Match via provider ids
- [x] Map usernames
- [x] Use single login
- [x] One way/multi way sync
- [x] Sync watched
- [x] Sync in progress

### Jellyfin

- [x] Match via filenames
- [x] Match via provider ids
- [x] Map usernames
- [x] Use single login
- [x] One way/multi way sync
- [x] Sync watched
- [x] Sync in progress

### Emby

- [x] Match via filenames
- [x] Match via provider ids
- [x] Map usernames
- [x] Use single login
- [x] One way/multi way sync
- [x] Sync watched
- [x] Sync in progress

## Configuration
|
||||
|
||||
Full list of configuration options can be found in the [.env.sample](.env.sample)
|
||||
|
||||
## Installation
|
||||
|
||||
### Baremetal
|
||||
|
||||
- Setup virtualenv of your choice
|
||||
|
||||
- Install dependencies
|
||||
|
||||
```bash
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
|
||||
- Create a .env file similar to .env.sample, uncomment whitelist and blacklist if needed, fill in baseurls and tokens
|
||||
|
||||
- Run
|
||||
|
||||
```bash
|
||||
python main.py
|
||||
```
|
||||
|
||||
### Docker
|
||||
|
||||
- Build docker image
|
||||
|
||||
```bash
|
||||
docker build -t jellyplex-watched .
|
||||
```
|
||||
|
||||
- or use pre-built image
|
||||
|
||||
```bash
|
||||
docker pull luigi311/jellyplex-watched:latest
|
||||
```
|
||||
|
||||
#### With variables
|
||||
|
||||
- Run
|
||||
|
||||
```bash
|
||||
docker run --rm -it -e PLEX_TOKEN='SuperSecretToken' luigi311/jellyplex-watched:latest
|
||||
```
|
||||
|
||||
#### With .env
|
||||
|
||||
- Create a .env file similar to .env.sample and set the variables to match your setup
|
||||
|
||||
- Run
|
||||
|
||||
```bash
|
||||
docker run --rm -it -v "$(pwd)/.env:/app/.env" luigi311/jellyplex-watched:latest
|
||||
```
|
||||
|
||||
## Troubleshooting/Issues
|
||||
|
||||
- Jellyfin
|
||||
|
||||
"Attempt to decode JSON with unexpected mimetype": make sure you enable remote access or add your docker subnet to lan networks in jellyfin settings
|
||||
|
||||
- Configuration
|
||||
- Do not use quotes around variables in docker compose
|
||||
|
||||
## Contributing
|
||||
|
||||
I am open to receiving pull requests. If you are submitting a pull request, please make sure to run it locally for a day or two to make sure it is working as expected and stable.
|
||||
|
||||
## License
|
||||
|
||||
This is currently under the GNU General Public License v3.0.
|
||||
|
||||
11  docker-compose.yml  Normal file
@@ -0,0 +1,11 @@
+# Sync watched status between media servers locally
+
+services:
+  jellyplex-watched:
+    image: luigi311/jellyplex-watched:latest
+    container_name: jellyplex-watched
+    restart: unless-stopped
+    environment:
+      - PUID=1000
+      - PGID=1000
+    env_file: "./.env"
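Assuming the compose file above is saved as docker-compose.yml next to your .env file, starting the container would typically look like:

```bash
docker compose up -d
```

`docker compose logs -f jellyplex-watched` would then follow the sync output.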
61  entrypoint.sh  Normal file
@@ -0,0 +1,61 @@
|
||||
#!/usr/bin/env sh
|
||||
|
||||
set -e
|
||||
|
||||
# Check if user is root
|
||||
if [ "$(id -u)" = '0' ]; then
|
||||
echo "User is root, checking if we need to create a user and group based on environment variables"
|
||||
# Create group and user based on environment variables
|
||||
if [ ! "$(getent group "$PGID")" ]; then
|
||||
# If groupadd exists, use it
|
||||
if command -v groupadd > /dev/null; then
|
||||
groupadd -g "$PGID" jellyplex_watched
|
||||
elif command -v addgroup > /dev/null; then
|
||||
addgroup -g "$PGID" jellyplex_watched
|
||||
fi
|
||||
fi
|
||||
|
||||
# If user id does not exist, create the user
|
||||
if [ ! "$(getent passwd "$PUID")" ]; then
|
||||
if command -v useradd > /dev/null; then
|
||||
useradd --no-create-home -u "$PUID" -g "$PGID" jellyplex_watched
|
||||
elif command -v adduser > /dev/null; then
|
||||
# Get the group name based on the PGID since adduser does not have a flag to specify the group id
|
||||
# and if the group id already exists the group name will be something unexpected
|
||||
GROUPNAME=$(getent group "$PGID" | cut -d: -f1)
|
||||
|
||||
# Use alpine busybox adduser syntax
|
||||
adduser -D -H -u "$PUID" -G "$GROUPNAME" jellyplex_watched
|
||||
fi
|
||||
fi
|
||||
else
|
||||
# If user is not root, set the PUID and PGID to the current user
|
||||
PUID=$(id -u)
|
||||
PGID=$(id -g)
|
||||
fi
|
||||
|
||||
# Get directory of log and mark file to create base folder if it doesn't exist
|
||||
LOG_DIR=$(dirname "$LOG_FILE")
|
||||
# If LOG_DIR is set, create the directory
|
||||
if [ -n "$LOG_DIR" ]; then
|
||||
mkdir -p "$LOG_DIR"
|
||||
fi
|
||||
|
||||
MARK_DIR=$(dirname "$MARK_FILE")
|
||||
if [ -n "$MARK_DIR" ]; then
|
||||
mkdir -p "$MARK_DIR"
|
||||
fi
|
||||
|
||||
echo "Starting JellyPlex-Watched with UID: $PUID and GID: $PGID"
|
||||
|
||||
# If root run as the created user
|
||||
if [ "$(id -u)" = '0' ]; then
|
||||
chown -R "$PUID:$PGID" "$LOG_DIR"
|
||||
chown -R "$PUID:$PGID" "$MARK_DIR"
|
||||
|
||||
# Run the application as the created user
|
||||
exec gosu "$PUID:$PGID" "$@"
|
||||
fi
|
||||
|
||||
# Run the application as the current user
|
||||
exec "$@"
|
||||
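To illustrate how PUID/PGID feed into this entrypoint when using the published image (a sketch based on the image name and variables used elsewhere in this changeset, not a prescribed invocation):

```bash
# Run the container as UID/GID 1000 with a local .env mounted into /app
docker run --rm -it \
  -e PUID=1000 -e PGID=1000 \
  -v "$(pwd)/.env:/app/.env" \
  luigi311/jellyplex-watched:latest
```

With these variables set, the script runs as that UID/GID instead of root, and the log and mark files end up owned accordingly.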
6  main.py
@@ -1,9 +1,9 @@
 import sys
 
 if __name__ == "__main__":
-    # Check python version 3.6 or higher
-    if not (3, 6) <= tuple(map(int, sys.version_info[:2])):
-        print("This script requires Python 3.6 or higher")
+    # Check python version 3.9 or higher
+    if not (3, 9) <= tuple(map(int, sys.version_info[:2])):
+        print("This script requires Python 3.9 or higher")
         sys.exit(1)
 
     from src.main import main
BIN  requirements.txt  Binary file not shown.
92  src/black_white.py  Normal file
@@ -0,0 +1,92 @@
|
||||
from src.functions import logger, search_mapping
|
||||
|
||||
|
||||
def setup_black_white_lists(
|
||||
blacklist_library: str,
|
||||
whitelist_library: str,
|
||||
blacklist_library_type: str,
|
||||
whitelist_library_type: str,
|
||||
blacklist_users: str,
|
||||
whitelist_users: str,
|
||||
library_mapping=None,
|
||||
user_mapping=None,
|
||||
):
|
||||
blacklist_library, blacklist_library_type, blacklist_users = setup_x_lists(
|
||||
blacklist_library,
|
||||
blacklist_library_type,
|
||||
blacklist_users,
|
||||
"Black",
|
||||
library_mapping,
|
||||
user_mapping,
|
||||
)
|
||||
|
||||
whitelist_library, whitelist_library_type, whitelist_users = setup_x_lists(
|
||||
whitelist_library,
|
||||
whitelist_library_type,
|
||||
whitelist_users,
|
||||
"White",
|
||||
library_mapping,
|
||||
user_mapping,
|
||||
)
|
||||
|
||||
return (
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
blacklist_users,
|
||||
whitelist_users,
|
||||
)
|
||||
|
||||
|
||||
def setup_x_lists(
|
||||
xlist_library,
|
||||
xlist_library_type,
|
||||
xlist_users,
|
||||
xlist_type,
|
||||
library_mapping=None,
|
||||
user_mapping=None,
|
||||
):
|
||||
if xlist_library:
|
||||
if len(xlist_library) > 0:
|
||||
xlist_library = xlist_library.split(",")
|
||||
xlist_library = [x.strip() for x in xlist_library]
|
||||
if library_mapping:
|
||||
temp_library = []
|
||||
for library in xlist_library:
|
||||
library_other = search_mapping(library_mapping, library)
|
||||
if library_other:
|
||||
temp_library.append(library_other)
|
||||
|
||||
xlist_library = xlist_library + temp_library
|
||||
else:
|
||||
xlist_library = []
|
||||
logger(f"{xlist_type}list Library: {xlist_library}", 1)
|
||||
|
||||
if xlist_library_type:
|
||||
if len(xlist_library_type) > 0:
|
||||
xlist_library_type = xlist_library_type.split(",")
|
||||
xlist_library_type = [x.lower().strip() for x in xlist_library_type]
|
||||
else:
|
||||
xlist_library_type = []
|
||||
logger(f"{xlist_type}list Library Type: {xlist_library_type}", 1)
|
||||
|
||||
if xlist_users:
|
||||
if len(xlist_users) > 0:
|
||||
xlist_users = xlist_users.split(",")
|
||||
xlist_users = [x.lower().strip() for x in xlist_users]
|
||||
if user_mapping:
|
||||
temp_users = []
|
||||
for user in xlist_users:
|
||||
user_other = search_mapping(user_mapping, user)
|
||||
if user_other:
|
||||
temp_users.append(user_other)
|
||||
|
||||
xlist_users = xlist_users + temp_users
|
||||
else:
|
||||
xlist_users = []
|
||||
else:
|
||||
xlist_users = []
|
||||
logger(f"{xlist_type}list Users: {xlist_users}", 1)
|
||||
|
||||
return xlist_library, xlist_library_type, xlist_users
|
||||
139  src/connection.py  Normal file
@@ -0,0 +1,139 @@
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
|
||||
from src.functions import logger, str_to_bool
|
||||
from src.plex import Plex
|
||||
from src.jellyfin import Jellyfin
|
||||
from src.emby import Emby
|
||||
|
||||
load_dotenv(override=True)
|
||||
|
||||
|
||||
def jellyfin_emby_server_connection(server_baseurl, server_token, server_type):
|
||||
servers = []
|
||||
|
||||
server_baseurl = server_baseurl.split(",")
|
||||
server_token = server_token.split(",")
|
||||
|
||||
if len(server_baseurl) != len(server_token):
|
||||
raise Exception(
|
||||
f"{server_type.upper()}_BASEURL and {server_type.upper()}_TOKEN must have the same number of entries"
|
||||
)
|
||||
|
||||
for i, baseurl in enumerate(server_baseurl):
|
||||
baseurl = baseurl.strip()
|
||||
if baseurl[-1] == "/":
|
||||
baseurl = baseurl[:-1]
|
||||
|
||||
if server_type == "jellyfin":
|
||||
server = Jellyfin(baseurl=baseurl, token=server_token[i].strip())
|
||||
servers.append(
|
||||
(
|
||||
"jellyfin",
|
||||
server,
|
||||
)
|
||||
)
|
||||
|
||||
elif server_type == "emby":
|
||||
server = Emby(baseurl=baseurl, token=server_token[i].strip())
|
||||
servers.append(
|
||||
(
|
||||
"emby",
|
||||
server,
|
||||
)
|
||||
)
|
||||
else:
|
||||
raise Exception("Unknown server type")
|
||||
|
||||
logger(f"{server_type} Server {i} info: {server.info()}", 3)
|
||||
|
||||
return servers
|
||||
|
||||
|
||||
def generate_server_connections():
|
||||
servers = []
|
||||
|
||||
plex_baseurl = os.getenv("PLEX_BASEURL", None)
|
||||
plex_token = os.getenv("PLEX_TOKEN", None)
|
||||
plex_username = os.getenv("PLEX_USERNAME", None)
|
||||
plex_password = os.getenv("PLEX_PASSWORD", None)
|
||||
plex_servername = os.getenv("PLEX_SERVERNAME", None)
|
||||
ssl_bypass = str_to_bool(os.getenv("SSL_BYPASS", "False"))
|
||||
|
||||
if plex_baseurl and plex_token:
|
||||
plex_baseurl = plex_baseurl.split(",")
|
||||
plex_token = plex_token.split(",")
|
||||
|
||||
if len(plex_baseurl) != len(plex_token):
|
||||
raise Exception(
|
||||
"PLEX_BASEURL and PLEX_TOKEN must have the same number of entries"
|
||||
)
|
||||
|
||||
for i, url in enumerate(plex_baseurl):
|
||||
server = Plex(
|
||||
baseurl=url.strip(),
|
||||
token=plex_token[i].strip(),
|
||||
username=None,
|
||||
password=None,
|
||||
servername=None,
|
||||
ssl_bypass=ssl_bypass,
|
||||
)
|
||||
|
||||
logger(f"Plex Server {i} info: {server.info()}", 3)
|
||||
|
||||
servers.append(
|
||||
(
|
||||
"plex",
|
||||
server,
|
||||
)
|
||||
)
|
||||
|
||||
if plex_username and plex_password and plex_servername:
|
||||
plex_username = plex_username.split(",")
|
||||
plex_password = plex_password.split(",")
|
||||
plex_servername = plex_servername.split(",")
|
||||
|
||||
if len(plex_username) != len(plex_password) or len(plex_username) != len(
|
||||
plex_servername
|
||||
):
|
||||
raise Exception(
|
||||
"PLEX_USERNAME, PLEX_PASSWORD and PLEX_SERVERNAME must have the same number of entries"
|
||||
)
|
||||
|
||||
for i, username in enumerate(plex_username):
|
||||
server = Plex(
|
||||
baseurl=None,
|
||||
token=None,
|
||||
username=username.strip(),
|
||||
password=plex_password[i].strip(),
|
||||
servername=plex_servername[i].strip(),
|
||||
ssl_bypass=ssl_bypass,
|
||||
)
|
||||
|
||||
logger(f"Plex Server {i} info: {server.info()}", 3)
|
||||
servers.append(
|
||||
(
|
||||
"plex",
|
||||
server,
|
||||
)
|
||||
)
|
||||
|
||||
jellyfin_baseurl = os.getenv("JELLYFIN_BASEURL", None)
|
||||
jellyfin_token = os.getenv("JELLYFIN_TOKEN", None)
|
||||
|
||||
if jellyfin_baseurl and jellyfin_token:
|
||||
servers.extend(
|
||||
jellyfin_emby_server_connection(
|
||||
jellyfin_baseurl, jellyfin_token, "jellyfin"
|
||||
)
|
||||
)
|
||||
|
||||
emby_baseurl = os.getenv("EMBY_BASEURL", None)
|
||||
emby_token = os.getenv("EMBY_TOKEN", None)
|
||||
|
||||
if emby_baseurl and emby_token:
|
||||
servers.extend(
|
||||
jellyfin_emby_server_connection(emby_baseurl, emby_token, "emby")
|
||||
)
|
||||
|
||||
return servers
|
||||
25  src/emby.py  Normal file
@@ -0,0 +1,25 @@
|
||||
from src.jellyfin_emby import JellyfinEmby
|
||||
from packaging import version
|
||||
|
||||
|
||||
class Emby(JellyfinEmby):
|
||||
def __init__(self, baseurl, token):
|
||||
authorization = (
|
||||
"Emby , "
|
||||
'Client="JellyPlex-Watched", '
|
||||
'Device="script", '
|
||||
'DeviceId="script", '
|
||||
'Version="6.0.2"'
|
||||
)
|
||||
headers = {
|
||||
"Accept": "application/json",
|
||||
"X-Emby-Token": token,
|
||||
"X-Emby-Authorization": authorization,
|
||||
}
|
||||
|
||||
super().__init__(
|
||||
server_type="Emby", baseurl=baseurl, token=token, headers=headers
|
||||
)
|
||||
|
||||
def is_partial_update_supported(self, server_version):
|
||||
return server_version > version.parse("4.4")
|
||||
195  src/functions.py
@@ -4,17 +4,18 @@ from dotenv import load_dotenv
|
||||
|
||||
load_dotenv(override=True)
|
||||
|
||||
logfile = os.getenv("LOGFILE", "log.log")
|
||||
log_file = os.getenv("LOG_FILE", os.getenv("LOGFILE", "log.log"))
|
||||
mark_file = os.getenv("MARK_FILE", os.getenv("MARKFILE", "mark.log"))
|
||||
|
||||
|
||||
def logger(message: str, log_type=0):
|
||||
debug = str_to_bool(os.getenv("DEBUG", "True"))
|
||||
debug = str_to_bool(os.getenv("DEBUG", "False"))
|
||||
debug_level = os.getenv("DEBUG_LEVEL", "info").lower()
|
||||
|
||||
output = str(message)
|
||||
if log_type == 0:
|
||||
pass
|
||||
elif log_type == 1 and (debug and debug_level == "info"):
|
||||
elif log_type == 1 and (debug and debug_level in ("info", "debug")):
|
||||
output = f"[INFO]: {output}"
|
||||
elif log_type == 2:
|
||||
output = f"[ERROR]: {output}"
|
||||
@@ -22,12 +23,40 @@ def logger(message: str, log_type=0):
|
||||
output = f"[DEBUG]: {output}"
|
||||
elif log_type == 4:
|
||||
output = f"[WARNING]: {output}"
|
||||
elif log_type == 5:
|
||||
output = f"[MARK]: {output}"
|
||||
elif log_type == 6:
|
||||
output = f"[DRYRUN]: {output}"
|
||||
else:
|
||||
output = None
|
||||
|
||||
if output is not None:
|
||||
print(output)
|
||||
file = open(logfile, "a", encoding="utf-8")
|
||||
with open(f"{log_file}", "a", encoding="utf-8") as file:
|
||||
file.write(output + "\n")
|
||||
|
||||
|
||||
def log_marked(
|
||||
server_type: str,
|
||||
server_name: str,
|
||||
username: str,
|
||||
library: str,
|
||||
movie_show: str,
|
||||
episode: str = None,
|
||||
duration=None,
|
||||
):
|
||||
if mark_file is None:
|
||||
return
|
||||
|
||||
output = f"{server_type}/{server_name}/{username}/{library}/{movie_show}"
|
||||
|
||||
if episode:
|
||||
output += f"/{episode}"
|
||||
|
||||
if duration:
|
||||
output += f"/{duration}"
|
||||
|
||||
with open(f"{mark_file}", "a", encoding="utf-8") as file:
|
||||
file.write(output + "\n")
|
||||
|
||||
|
||||
@@ -39,6 +68,21 @@ def str_to_bool(value: any) -> bool:
|
||||
return str(value).lower() in ("y", "yes", "t", "true", "on", "1")
|
||||
|
||||
|
||||
# Search for nested element in list
|
||||
def contains_nested(element, lst):
|
||||
if lst is None:
|
||||
return None
|
||||
|
||||
for i, item in enumerate(lst):
|
||||
if item is None:
|
||||
continue
|
||||
if element in item:
|
||||
return i
|
||||
elif element == item:
|
||||
return i
|
||||
return None
|
||||
|
||||
|
||||
# Get mapped value
|
||||
def search_mapping(dictionary: dict, key_value: str):
|
||||
if key_value in dictionary.keys():
|
||||
@@ -55,124 +99,39 @@ def search_mapping(dictionary: dict, key_value: str):
|
||||
return None
|
||||
|
||||
|
||||
def check_skip_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
# Return list of objects that exist in both lists including mappings
|
||||
def match_list(list1, list2, list_mapping=None):
|
||||
output = []
|
||||
for element in list1:
|
||||
if element in list2:
|
||||
output.append(element)
|
||||
elif list_mapping:
|
||||
element_other = search_mapping(list_mapping, element)
|
||||
if element_other in list2:
|
||||
output.append(element)
|
||||
|
||||
return output
|
||||
|
||||
|
||||
def future_thread_executor(
|
||||
args: list, threads: int = None, override_threads: bool = False
|
||||
):
|
||||
skip_reason = None
|
||||
|
||||
if library_type.lower() in blacklist_library_type:
|
||||
skip_reason = "is blacklist_library_type"
|
||||
|
||||
if library_title.lower() in [x.lower() for x in blacklist_library]:
|
||||
skip_reason = "is blacklist_library"
|
||||
|
||||
library_other = None
|
||||
if library_mapping:
|
||||
library_other = search_mapping(library_mapping, library_title)
|
||||
if library_other:
|
||||
if library_other.lower() in [x.lower() for x in blacklist_library]:
|
||||
skip_reason = "is blacklist_library"
|
||||
|
||||
if len(whitelist_library_type) > 0:
|
||||
if library_type.lower() not in whitelist_library_type:
|
||||
skip_reason = "is not whitelist_library_type"
|
||||
|
||||
# if whitelist is not empty and library is not in whitelist
|
||||
if len(whitelist_library) > 0:
|
||||
if library_title.lower() not in [x.lower() for x in whitelist_library]:
|
||||
skip_reason = "is not whitelist_library"
|
||||
|
||||
if library_other:
|
||||
if library_other.lower() not in [x.lower() for x in whitelist_library]:
|
||||
skip_reason = "is not whitelist_library"
|
||||
|
||||
return skip_reason
|
||||
|
||||
|
||||
def generate_library_guids_dict(user_list: dict):
|
||||
show_output_dict = {}
|
||||
episode_output_dict = {}
|
||||
movies_output_dict = {}
|
||||
|
||||
try:
|
||||
show_output_keys = user_list.keys()
|
||||
show_output_keys = [dict(x) for x in list(show_output_keys)]
|
||||
for show_key in show_output_keys:
|
||||
for provider_key, provider_value in show_key.items():
|
||||
# Skip title
|
||||
if provider_key.lower() == "title":
|
||||
continue
|
||||
if provider_key.lower() not in show_output_dict:
|
||||
show_output_dict[provider_key.lower()] = []
|
||||
if provider_key.lower() == "locations":
|
||||
for show_location in provider_value:
|
||||
show_output_dict[provider_key.lower()].append(show_location)
|
||||
else:
|
||||
show_output_dict[provider_key.lower()].append(
|
||||
provider_value.lower()
|
||||
)
|
||||
except Exception:
|
||||
logger("Generating show_output_dict failed, skipping", 1)
|
||||
|
||||
try:
|
||||
for show in user_list:
|
||||
for season in user_list[show]:
|
||||
for episode in user_list[show][season]:
|
||||
for episode_key, episode_value in episode.items():
|
||||
if episode_key.lower() not in episode_output_dict:
|
||||
episode_output_dict[episode_key.lower()] = []
|
||||
if episode_key == "locations":
|
||||
for episode_location in episode_value:
|
||||
episode_output_dict[episode_key.lower()].append(
|
||||
episode_location
|
||||
)
|
||||
else:
|
||||
episode_output_dict[episode_key.lower()].append(
|
||||
episode_value.lower()
|
||||
)
|
||||
except Exception:
|
||||
logger("Generating episode_output_dict failed, skipping", 1)
|
||||
|
||||
try:
|
||||
for movie in user_list:
|
||||
for movie_key, movie_value in movie.items():
|
||||
if movie_key.lower() not in movies_output_dict:
|
||||
movies_output_dict[movie_key.lower()] = []
|
||||
if movie_key == "locations":
|
||||
for movie_location in movie_value:
|
||||
movies_output_dict[movie_key.lower()].append(movie_location)
|
||||
else:
|
||||
movies_output_dict[movie_key.lower()].append(movie_value.lower())
|
||||
except Exception:
|
||||
logger("Generating movies_output_dict failed, skipping", 1)
|
||||
|
||||
return show_output_dict, episode_output_dict, movies_output_dict
|
||||
|
||||
|
||||
def combine_watched_dicts(dicts: list):
|
||||
combined_dict = {}
|
||||
for single_dict in dicts:
|
||||
for key, value in single_dict.items():
|
||||
if key not in combined_dict:
|
||||
combined_dict[key] = {}
|
||||
for subkey, subvalue in value.items():
|
||||
combined_dict[key][subkey] = subvalue
|
||||
|
||||
return combined_dict
|
||||
|
||||
|
||||
def future_thread_executor(args: list, workers: int = -1):
|
||||
futures_list = []
|
||||
results = []
|
||||
|
||||
if workers == -1:
|
||||
workers = min(32, os.cpu_count() * 1.25)
|
||||
workers = min(int(os.getenv("MAX_THREADS", 32)), os.cpu_count() * 2)
|
||||
if threads:
|
||||
workers = min(threads, workers)
|
||||
|
||||
if override_threads:
|
||||
workers = threads
|
||||
|
||||
# If only one worker, run in main thread to avoid overhead
|
||||
if workers == 1:
|
||||
results = []
|
||||
for arg in args:
|
||||
results.append(arg[0](*arg[1:]))
|
||||
return results
|
||||
|
||||
with ThreadPoolExecutor(max_workers=workers) as executor:
|
||||
for arg in args:
|
||||
|
||||
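A quick sanity check of the helpers defined above (str_to_bool and contains_nested); this assumes the dependencies from requirements.txt are installed and that it is run from the repository root so that src is importable:

```bash
# str_to_bool("yes") is truthy; contains_nested returns the index of the tuple containing the filename
python -c 'from src.functions import str_to_bool, contains_nested; print(str_to_bool("yes")); print(contains_nested("S01E02.mkv", [("S01E01.mkv", "S01E02.mkv")]))'
# Expected output: True, then 0
```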
671  src/jellyfin.py
@@ -1,646 +1,25 @@
|
||||
import asyncio, aiohttp
|
||||
from src.functions import (
|
||||
logger,
|
||||
search_mapping,
|
||||
check_skip_logic,
|
||||
generate_library_guids_dict,
|
||||
combine_watched_dicts,
|
||||
)
|
||||
|
||||
|
||||
class Jellyfin:
|
||||
def __init__(self, baseurl, token):
|
||||
self.baseurl = baseurl
|
||||
self.token = token
|
||||
|
||||
if not self.baseurl:
|
||||
raise Exception("Jellyfin baseurl not set")
|
||||
|
||||
if not self.token:
|
||||
raise Exception("Jellyfin token not set")
|
||||
|
||||
self.users = asyncio.run(self.get_users())
|
||||
|
||||
async def query(self, query, query_type, session, identifiers=None):
|
||||
try:
|
||||
results = None
|
||||
headers = {"Accept": "application/json", "X-Emby-Token": self.token}
|
||||
authorization = (
|
||||
"MediaBrowser , "
|
||||
'Client="other", '
|
||||
'Device="script", '
|
||||
'DeviceId="script", '
|
||||
'Version="0.0.0"'
|
||||
)
|
||||
headers["X-Emby-Authorization"] = authorization
|
||||
|
||||
if query_type == "get":
|
||||
async with session.get(
|
||||
self.baseurl + query, headers=headers
|
||||
) as response:
|
||||
results = await response.json()
|
||||
|
||||
elif query_type == "post":
|
||||
async with session.post(
|
||||
self.baseurl + query, headers=headers
|
||||
) as response:
|
||||
results = await response.json()
|
||||
|
||||
# append identifiers to results
|
||||
if identifiers:
|
||||
results["Identifiers"] = identifiers
|
||||
return results
|
||||
|
||||
except Exception as e:
|
||||
logger(f"Jellyfin: Query failed {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
async def get_users(self):
|
||||
try:
|
||||
users = {}
|
||||
|
||||
query_string = "/Users"
|
||||
async with aiohttp.ClientSession() as session:
|
||||
response = await self.query(query_string, "get", session)
|
||||
|
||||
# If reponse is not empty
|
||||
if response:
|
||||
for user in response:
|
||||
users[user["Name"]] = user["Id"]
|
||||
|
||||
return users
|
||||
except Exception as e:
|
||||
logger(f"Jellyfin: Get users failed {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
async def get_user_watched(
|
||||
self, user_name, user_id, library_type, library_id, library_title
|
||||
):
|
||||
try:
|
||||
user_name = user_name.lower()
|
||||
user_watched = {}
|
||||
user_watched[user_name] = {}
|
||||
|
||||
logger(
|
||||
f"Jellyfin: Generating watched for {user_name} in library {library_title}",
|
||||
0,
|
||||
)
|
||||
# Movies
|
||||
async with aiohttp.ClientSession() as session:
|
||||
if library_type == "Movie":
|
||||
user_watched[user_name][library_title] = []
|
||||
watched = await self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&Filters=IsPlayed&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||
"get",
|
||||
session,
|
||||
)
|
||||
for movie in watched["Items"]:
|
||||
if movie["UserData"]["Played"] is True:
|
||||
movie_guids = {}
|
||||
movie_guids["title"] = movie["Name"]
|
||||
if "ProviderIds" in movie:
|
||||
# Lowercase movie["ProviderIds"] keys
|
||||
movie_guids = {
|
||||
k.lower(): v
|
||||
for k, v in movie["ProviderIds"].items()
|
||||
}
|
||||
if "MediaSources" in movie:
|
||||
movie_guids["locations"] = tuple(
|
||||
[
|
||||
x["Path"].split("/")[-1]
|
||||
for x in movie["MediaSources"]
|
||||
]
|
||||
)
|
||||
user_watched[user_name][library_title].append(movie_guids)
|
||||
|
||||
# TV Shows
|
||||
if library_type == "Series":
|
||||
user_watched[user_name][library_title] = {}
|
||||
watched_shows = await self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&isPlaceHolder=false&Fields=ProviderIds,Path,RecursiveItemCount",
|
||||
"get",
|
||||
session,
|
||||
)
|
||||
watched_shows_filtered = []
|
||||
for show in watched_shows["Items"]:
|
||||
if "PlayedPercentage" in show["UserData"]:
|
||||
if show["UserData"]["PlayedPercentage"] > 0:
|
||||
watched_shows_filtered.append(show)
|
||||
seasons_tasks = []
|
||||
for show in watched_shows_filtered:
|
||||
show_guids = {
|
||||
k.lower(): v for k, v in show["ProviderIds"].items()
|
||||
}
|
||||
show_guids["title"] = show["Name"]
|
||||
show_guids["locations"] = tuple([show["Path"].split("/")[-1]])
|
||||
show_guids = frozenset(show_guids.items())
|
||||
identifiers = {"show_guids": show_guids, "show_id": show["Id"]}
|
||||
task = asyncio.ensure_future(
|
||||
self.query(
|
||||
f"/Shows/{show['Id']}/Seasons"
|
||||
+ f"?userId={user_id}&isPlaceHolder=false&Fields=ProviderIds,RecursiveItemCount",
|
||||
"get",
|
||||
session,
|
||||
frozenset(identifiers.items()),
|
||||
)
|
||||
)
|
||||
seasons_tasks.append(task)
|
||||
|
||||
seasons_watched = await asyncio.gather(*seasons_tasks)
|
||||
seasons_watched_filtered = []
|
||||
|
||||
for seasons in seasons_watched:
|
||||
seasons_watched_filtered_dict = {}
|
||||
seasons_watched_filtered_dict["Identifiers"] = seasons[
|
||||
"Identifiers"
|
||||
]
|
||||
seasons_watched_filtered_dict["Items"] = []
|
||||
for season in seasons["Items"]:
|
||||
if "PlayedPercentage" in season["UserData"]:
|
||||
if season["UserData"]["PlayedPercentage"] > 0:
|
||||
seasons_watched_filtered_dict["Items"].append(
|
||||
season
|
||||
)
|
||||
|
||||
if seasons_watched_filtered_dict["Items"]:
|
||||
seasons_watched_filtered.append(
|
||||
seasons_watched_filtered_dict
|
||||
)
|
||||
|
||||
episodes_tasks = []
|
||||
for seasons in seasons_watched_filtered:
|
||||
if len(seasons["Items"]) > 0:
|
||||
for season in seasons["Items"]:
|
||||
season_identifiers = dict(seasons["Identifiers"])
|
||||
season_identifiers["season_id"] = season["Id"]
|
||||
season_identifiers["season_name"] = season["Name"]
|
||||
task = asyncio.ensure_future(
|
||||
self.query(
|
||||
f"/Shows/{season_identifiers['show_id']}/Episodes"
|
||||
+ f"?seasonId={season['Id']}&userId={user_id}&isPlaceHolder=false&isPlayed=true&Fields=ProviderIds,MediaSources",
|
||||
"get",
|
||||
session,
|
||||
frozenset(season_identifiers.items()),
|
||||
)
|
||||
)
|
||||
episodes_tasks.append(task)
|
||||
|
||||
watched_episodes = await asyncio.gather(*episodes_tasks)
|
||||
for episodes in watched_episodes:
|
||||
if len(episodes["Items"]) > 0:
|
||||
for episode in episodes["Items"]:
|
||||
if episode["UserData"]["Played"] is True:
|
||||
if (
|
||||
"ProviderIds" in episode
|
||||
or "MediaSources" in episode
|
||||
):
|
||||
episode_identifiers = dict(
|
||||
episodes["Identifiers"]
|
||||
)
|
||||
show_guids = episode_identifiers["show_guids"]
|
||||
if (
|
||||
show_guids
|
||||
not in user_watched[user_name][
|
||||
library_title
|
||||
]
|
||||
):
|
||||
user_watched[user_name][library_title][
|
||||
show_guids
|
||||
] = {}
|
||||
if (
|
||||
episode_identifiers["season_name"]
|
||||
not in user_watched[user_name][
|
||||
library_title
|
||||
][show_guids]
|
||||
):
|
||||
user_watched[user_name][library_title][
|
||||
show_guids
|
||||
][episode_identifiers["season_name"]] = []
|
||||
|
||||
episode_guids = {}
|
||||
if "ProviderIds" in episode:
|
||||
episode_guids = {
|
||||
k.lower(): v
|
||||
for k, v in episode[
|
||||
"ProviderIds"
|
||||
].items()
|
||||
}
|
||||
if "MediaSources" in episode:
|
||||
episode_guids["locations"] = tuple(
|
||||
[
|
||||
x["Path"].split("/")[-1]
|
||||
for x in episode["MediaSources"]
|
||||
]
|
||||
)
|
||||
user_watched[user_name][library_title][
|
||||
show_guids
|
||||
][episode_identifiers["season_name"]].append(
|
||||
episode_guids
|
||||
)
|
||||
|
||||
return user_watched
|
||||
except Exception as e:
|
||||
logger(
|
||||
f"Jellyfin: Failed to get watched for {user_name} in library {library_title}, Error: {e}",
|
||||
2,
|
||||
)
|
||||
raise Exception(e)
|
||||
|
||||
async def get_users_watched(
|
||||
self,
|
||||
user_name,
|
||||
user_id,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
):
|
||||
try:
|
||||
# Get all libraries
|
||||
user_name = user_name.lower()
|
||||
tasks_watched = []
|
||||
|
||||
tasks_libraries = []
|
||||
async with aiohttp.ClientSession() as session:
|
||||
libraries = await self.query(f"/Users/{user_id}/Views", "get", session)
|
||||
for library in libraries["Items"]:
|
||||
library_id = library["Id"]
|
||||
library_title = library["Name"]
|
||||
identifiers = {
|
||||
"library_id": library_id,
|
||||
"library_title": library_title,
|
||||
}
|
||||
task = asyncio.ensure_future(
|
||||
self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&Filters=IsPlayed&limit=1",
|
||||
"get",
|
||||
session,
|
||||
identifiers=identifiers,
|
||||
)
|
||||
)
|
||||
tasks_libraries.append(task)
|
||||
|
||||
libraries = await asyncio.gather(
|
||||
*tasks_libraries, return_exceptions=True
|
||||
)
|
||||
|
||||
for watched in libraries:
|
||||
if len(watched["Items"]) == 0:
|
||||
continue
|
||||
|
||||
library_id = watched["Identifiers"]["library_id"]
|
||||
library_title = watched["Identifiers"]["library_title"]
|
||||
library_type = watched["Items"][0]["Type"]
|
||||
|
||||
skip_reason = check_skip_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
)
|
||||
|
||||
if skip_reason:
|
||||
logger(
|
||||
f"Jellyfin: Skipping library {library_title} {skip_reason}",
|
||||
1,
|
||||
)
|
||||
continue
|
||||
|
||||
# Get watched for user
|
||||
task = asyncio.ensure_future(
|
||||
self.get_user_watched(
|
||||
user_name, user_id, library_type, library_id, library_title
|
||||
)
|
||||
)
|
||||
tasks_watched.append(task)
|
||||
|
||||
watched = await asyncio.gather(*tasks_watched, return_exceptions=True)
|
||||
return watched
|
||||
except Exception as e:
|
||||
logger(f"Jellyfin: Failed to get users watched, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
async def get_watched(
|
||||
self,
|
||||
users,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping=None,
|
||||
):
|
||||
try:
|
||||
users_watched = {}
|
||||
watched = []
|
||||
|
||||
for user_name, user_id in users.items():
|
||||
watched.append(
|
||||
await self.get_users_watched(
|
||||
user_name,
|
||||
user_id,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
)
|
||||
)
|
||||
|
||||
for user_watched in watched:
|
||||
user_watched_temp = combine_watched_dicts(user_watched)
|
||||
for user, user_watched_temp in user_watched_temp.items():
|
||||
if user not in users_watched:
|
||||
users_watched[user] = {}
|
||||
users_watched[user].update(user_watched_temp)
|
||||
|
||||
return users_watched
|
||||
except Exception as e:
|
||||
logger(f"Jellyfin: Failed to get watched, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
async def update_user_watched(
|
||||
self, user_name, user_id, library, library_id, videos, dryrun
|
||||
):
|
||||
try:
|
||||
logger(
|
||||
f"Jellyfin: Updating watched for {user_name} in library {library}", 1
|
||||
)
|
||||
(
|
||||
videos_shows_ids,
|
||||
videos_episodes_ids,
|
||||
videos_movies_ids,
|
||||
) = generate_library_guids_dict(videos)
|
||||
|
||||
logger(
|
||||
f"Jellyfin: mark list\nShows: {videos_shows_ids}\nEpisodes: {videos_episodes_ids}\nMovies: {videos_movies_ids}",
|
||||
1,
|
||||
)
|
||||
async with aiohttp.ClientSession() as session:
|
||||
if videos_movies_ids:
|
||||
jellyfin_search = await self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=false&ParentId={library_id}"
|
||||
+ "&isPlayed=false&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||
"get",
|
||||
session,
|
||||
)
|
||||
for jellyfin_video in jellyfin_search["Items"]:
|
||||
movie_found = False
|
||||
|
||||
if "MediaSources" in jellyfin_video:
|
||||
for movie_location in jellyfin_video["MediaSources"]:
|
||||
if (
|
||||
movie_location["Path"].split("/")[-1]
|
||||
in videos_movies_ids["locations"]
|
||||
):
|
||||
movie_found = True
|
||||
break
|
||||
|
||||
if not movie_found:
|
||||
for (
|
||||
movie_provider_source,
|
||||
movie_provider_id,
|
||||
) in jellyfin_video["ProviderIds"].items():
|
||||
if movie_provider_source.lower() in videos_movies_ids:
|
||||
if (
|
||||
movie_provider_id.lower()
|
||||
in videos_movies_ids[
|
||||
movie_provider_source.lower()
|
||||
]
|
||||
):
|
||||
movie_found = True
|
||||
break
|
||||
|
||||
if movie_found:
|
||||
jellyfin_video_id = jellyfin_video["Id"]
|
||||
msg = f"{jellyfin_video['Name']} as watched for {user_name} in {library} for Jellyfin"
|
||||
if not dryrun:
|
||||
logger(f"Marking {msg}", 0)
|
||||
await self.query(
|
||||
f"/Users/{user_id}/PlayedItems/{jellyfin_video_id}",
|
||||
"post",
|
||||
session,
|
||||
)
|
||||
else:
|
||||
logger(f"Dryrun {msg}", 0)
|
||||
else:
|
||||
logger(
|
||||
f"Jellyfin: Skipping movie {jellyfin_video['Name']} as it is not in mark list for {user_name}",
|
||||
1,
|
||||
)
|
||||
|
||||
# TV Shows
|
||||
if videos_shows_ids and videos_episodes_ids:
|
||||
jellyfin_search = await self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=false&ParentId={library_id}"
|
||||
+ "&isPlayed=false&Fields=ItemCounts,ProviderIds,Path",
|
||||
"get",
|
||||
session,
|
||||
)
|
||||
jellyfin_shows = [x for x in jellyfin_search["Items"]]
|
||||
|
||||
for jellyfin_show in jellyfin_shows:
|
||||
show_found = False
|
||||
|
||||
if "Path" in jellyfin_show:
|
||||
if (
|
||||
jellyfin_show["Path"].split("/")[-1]
|
||||
in videos_shows_ids["locations"]
|
||||
):
|
||||
show_found = True
|
||||
|
||||
if not show_found:
|
||||
for show_provider_source, show_provider_id in jellyfin_show[
|
||||
"ProviderIds"
|
||||
].items():
|
||||
if show_provider_source.lower() in videos_shows_ids:
|
||||
if (
|
||||
show_provider_id.lower()
|
||||
in videos_shows_ids[
|
||||
show_provider_source.lower()
|
||||
]
|
||||
):
|
||||
show_found = True
|
||||
break
|
||||
|
||||
if show_found:
|
||||
logger(
|
||||
f"Jellyfin: Updating watched for {user_name} in library {library} for show {jellyfin_show['Name']}",
|
||||
1,
|
||||
)
|
||||
jellyfin_show_id = jellyfin_show["Id"]
|
||||
jellyfin_episodes = await self.query(
|
||||
f"/Shows/{jellyfin_show_id}/Episodes"
|
||||
+ f"?userId={user_id}&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||
"get",
|
||||
session,
|
||||
)
|
||||
|
||||
for jellyfin_episode in jellyfin_episodes["Items"]:
|
||||
episode_found = False
|
||||
|
||||
if "MediaSources" in jellyfin_episode:
|
||||
for episode_location in jellyfin_episode[
|
||||
"MediaSources"
|
||||
]:
|
||||
if (
|
||||
episode_location["Path"].split("/")[-1]
|
||||
in videos_episodes_ids["locations"]
|
||||
):
|
||||
episode_found = True
|
||||
break
|
||||
|
||||
if not episode_found:
|
||||
for (
|
||||
episode_provider_source,
|
||||
episode_provider_id,
|
||||
) in jellyfin_episode["ProviderIds"].items():
|
||||
if (
|
||||
episode_provider_source.lower()
|
||||
in videos_episodes_ids
|
||||
):
|
||||
if (
|
||||
episode_provider_id.lower()
|
||||
in videos_episodes_ids[
|
||||
episode_provider_source.lower()
|
||||
]
|
||||
):
|
||||
episode_found = True
|
||||
break
|
||||
|
||||
if episode_found:
|
||||
jellyfin_episode_id = jellyfin_episode["Id"]
|
||||
msg = (
|
||||
f"{jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode['Name']}"
|
||||
+ f" as watched for {user_name} in {library} for Jellyfin"
|
||||
)
|
||||
if not dryrun:
|
||||
logger(f"Marked {msg}", 0)
|
||||
await self.query(
|
||||
f"/Users/{user_id}/PlayedItems/{jellyfin_episode_id}",
|
||||
"post",
|
||||
session,
|
||||
)
|
||||
else:
|
||||
logger(f"Dryrun {msg}", 0)
|
||||
else:
|
||||
logger(
|
||||
f"Jellyfin: Skipping episode {jellyfin_episode['Name']} as it is not in mark list for {user_name}",
|
||||
1,
|
||||
)
|
||||
else:
|
||||
logger(
|
||||
f"Jellyfin: Skipping show {jellyfin_show['Name']} as it is not in mark list for {user_name}",
|
||||
1,
|
||||
)
|
||||
|
||||
if (
|
||||
not videos_movies_ids
|
||||
and not videos_shows_ids
|
||||
and not videos_episodes_ids
|
||||
):
|
||||
logger(
|
||||
f"Jellyfin: No videos to mark as watched for {user_name} in library {library}",
|
||||
1,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger(
|
||||
f"Jellyfin: Error updating watched for {user_name} in library {library}, {e}",
|
||||
2,
|
||||
)
|
||||
raise Exception(e)
|
||||
|
||||
async def update_watched(
|
||||
self, watched_list, user_mapping=None, library_mapping=None, dryrun=False
|
||||
):
|
||||
try:
|
||||
tasks = []
|
||||
async with aiohttp.ClientSession() as session:
|
||||
for user, libraries in watched_list.items():
|
||||
logger(f"Jellyfin: Updating for entry {user}, {libraries}", 1)
|
||||
user_other = None
|
||||
user_name = None
|
||||
if user_mapping:
|
||||
if user in user_mapping.keys():
|
||||
user_other = user_mapping[user]
|
||||
elif user in user_mapping.values():
|
||||
user_other = search_mapping(user_mapping, user)
|
||||
|
||||
user_id = None
|
||||
for key in self.users.keys():
|
||||
if user.lower() == key.lower():
|
||||
user_id = self.users[key]
|
||||
user_name = key
|
||||
break
|
||||
elif user_other and user_other.lower() == key.lower():
|
||||
user_id = self.users[key]
|
||||
user_name = key
|
||||
break
|
||||
|
||||
if not user_id:
|
||||
logger(f"{user} {user_other} not found in Jellyfin", 2)
|
||||
continue
|
||||
|
||||
jellyfin_libraries = await self.query(
|
||||
f"/Users/{user_id}/Views", "get", session
|
||||
)
|
||||
jellyfin_libraries = [x for x in jellyfin_libraries["Items"]]
|
||||
|
||||
for library, videos in libraries.items():
|
||||
library_other = None
|
||||
if library_mapping:
|
||||
if library in library_mapping.keys():
|
||||
library_other = library_mapping[library]
|
||||
elif library in library_mapping.values():
|
||||
library_other = search_mapping(library_mapping, library)
|
||||
|
||||
if library.lower() not in [
|
||||
x["Name"].lower() for x in jellyfin_libraries
|
||||
]:
|
||||
if library_other:
|
||||
if library_other.lower() in [
|
||||
x["Name"].lower() for x in jellyfin_libraries
|
||||
]:
|
||||
logger(
|
||||
f"Jellyfin: Library {library} not found, but {library_other} found, using {library_other}",
|
||||
1,
|
||||
)
|
||||
library = library_other
|
||||
else:
|
||||
logger(
|
||||
f"Jellyfin: Library {library} or {library_other} not found in library list",
|
||||
2,
|
||||
)
|
||||
continue
|
||||
else:
|
||||
logger(
|
||||
f"Jellyfin: Library {library} not found in library list",
|
||||
2,
|
||||
)
|
||||
continue
|
||||
|
||||
library_id = None
|
||||
for jellyfin_library in jellyfin_libraries:
|
||||
if jellyfin_library["Name"] == library:
|
||||
library_id = jellyfin_library["Id"]
|
||||
continue
|
||||
|
||||
if library_id:
|
||||
task = self.update_user_watched(
|
||||
user_name, user_id, library, library_id, videos, dryrun
|
||||
)
|
||||
tasks.append(task)
|
||||
|
||||
await asyncio.gather(*tasks, return_exceptions=True)
|
||||
except Exception as e:
|
||||
logger(f"Jellyfin: Error updating watched, {e}", 2)
|
||||
raise Exception(e)
|
||||
from src.jellyfin_emby import JellyfinEmby
from packaging import version


class Jellyfin(JellyfinEmby):
    def __init__(self, baseurl, token):
        authorization = (
            "MediaBrowser , "
            'Client="JellyPlex-Watched", '
            'Device="script", '
            'DeviceId="script", '
            'Version="6.0.2", '
            f'Token="{token}"'
        )
        headers = {
            "Accept": "application/json",
            "Authorization": authorization,
        }

        super().__init__(
            server_type="Jellyfin", baseurl=baseurl, token=token, headers=headers
        )

    def is_partial_update_supported(self, server_version):
        return server_version >= version.parse("10.9.0")
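A minimal usage sketch of the new wrapper class (not part of the diff; the environment variable names and default URL are assumptions):

import os

from src.jellyfin import Jellyfin

# Assumed configuration source; adjust to the project's .env handling
server = Jellyfin(
    baseurl=os.getenv("JELLYFIN_BASEURL", "http://localhost:8096"),
    token=os.getenv("JELLYFIN_TOKEN"),
)
print(server.info())  # inherited from JellyfinEmby, e.g. "Jellyfin MyServer: 10.9.0"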
820  src/jellyfin_emby.py  Normal file
@@ -0,0 +1,820 @@
# Functions for Jellyfin and Emby

import traceback, os
from math import floor
from dotenv import load_dotenv
import requests
from packaging import version

from src.functions import (
    logger,
    search_mapping,
    contains_nested,
    log_marked,
    str_to_bool,
)
from src.library import generate_library_guids_dict

load_dotenv(override=True)

generate_guids = str_to_bool(os.getenv("GENERATE_GUIDS", "True"))
generate_locations = str_to_bool(os.getenv("GENERATE_LOCATIONS", "True"))

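# Illustrative .env values for the two flags above plus the timeout read later in
# JellyfinEmby.__init__ (example values only, not part of the diff):
#
#   GENERATE_GUIDS=True       # allow matching items by provider IDs (imdb, tmdb, tvdb, ...)
#   GENERATE_LOCATIONS=True   # allow matching items by file name (see get_video_status)
#   REQUEST_TIMEOUT=300       # per-request timeout in seconds
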
def get_guids(server_type, item):
    if item.get("Name"):
        guids = {"title": item.get("Name")}
    else:
        logger(f"{server_type}: Name not found in {item.get('Id')}", 1)
        guids = {"title": None}

    if "ProviderIds" in item:
        guids.update({k.lower(): v for k, v in item["ProviderIds"].items()})
    else:
        logger(f"{server_type}: ProviderIds not found in {item.get('Name')}", 1)

    if "MediaSources" in item:
        guids["locations"] = tuple(
            [x["Path"].split("/")[-1] for x in item["MediaSources"] if "Path" in x]
        )
    else:
        logger(f"{server_type}: MediaSources not found in {item.get('Name')}", 1)
        guids["locations"] = tuple()

    if "UserData" in item:
        guids["status"] = {
            "completed": item["UserData"]["Played"],
            # Convert ticks to milliseconds to match Plex
            "time": floor(item["UserData"]["PlaybackPositionTicks"] / 10000),
        }
    else:
        logger(f"{server_type}: UserData not found in {item.get('Name')}", 1)
        guids["status"] = {}

    return guids

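# Illustrative only (provider IDs and file name are made up): for a typical watched
# movie item, get_guids() produces a flat dict like the one below, with the playback
# position converted from ticks to milliseconds.
#
#   {
#       "title": "Example Movie",
#       "imdb": "tt0000000",
#       "tmdb": "12345",
#       "locations": ("Example.Movie.2020.mkv",),
#       "status": {"completed": True, "time": 0},
#   }
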

def get_video_status(server_video, videos_ids, videos):
    video_status = None

    if generate_locations:
        if "MediaSources" in server_video:
            for video_location in server_video["MediaSources"]:
                if "Path" in video_location:
                    if (
                        contains_nested(
                            video_location["Path"].split("/")[-1],
                            videos_ids["locations"],
                        )
                        is not None
                    ):
                        for video in videos:
                            if (
                                contains_nested(
                                    video_location["Path"].split("/")[-1],
                                    video["locations"],
                                )
                                is not None
                            ):
                                video_status = video["status"]
                                break
                        break

    if generate_guids:
        if not video_status:
            for (
                video_provider_source,
                video_provider_id,
            ) in server_video["ProviderIds"].items():
                if video_provider_source.lower() in videos_ids:
                    if (
                        video_provider_id.lower()
                        in videos_ids[video_provider_source.lower()]
                    ):
                        for video in videos:
                            if video_provider_id.lower() in video.get(
                                video_provider_source.lower(), []
                            ):
                                video_status = video["status"]
                                break
                        break

    return video_status

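# Sketch of how get_video_status() is typically called (variable names are assumptions):
# videos_ids is the column-style dict built by generate_library_guids_dict() and videos
# is the corresponding raw watched list from the other server.
#
#   movies_ids = {"locations": ("Example.Movie.2020.mkv",), "imdb": ["tt0000000"]}
#   movies = [{"imdb": "tt0000000", "locations": ("Example.Movie.2020.mkv",),
#              "status": {"completed": True, "time": 0}}]
#   status = get_video_status(server_movie_item, movies_ids, movies)
#   if status and status["completed"]:
#       ...  # mark server_movie_item as played
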

class JellyfinEmby:
    def __init__(self, server_type, baseurl, token, headers):
        if server_type not in ["Jellyfin", "Emby"]:
            raise Exception(f"Server type {server_type} not supported")
        self.server_type = server_type
        self.baseurl = baseurl
        self.token = token
        self.headers = headers
        self.timeout = int(os.getenv("REQUEST_TIMEOUT", 300))

        if not self.baseurl:
            raise Exception(f"{self.server_type} baseurl not set")

        if not self.token:
            raise Exception(f"{self.server_type} token not set")

        self.session = requests.Session()
        self.users = self.get_users()
        self.server_name = self.info(name_only=True)

|
||||
def query(self, query, query_type, identifiers=None, json=None):
|
||||
try:
|
||||
results = None
|
||||
|
||||
if query_type == "get":
|
||||
response = self.session.get(
|
||||
self.baseurl + query, headers=self.headers, timeout=self.timeout
|
||||
)
|
||||
if response.status_code not in [200, 204]:
|
||||
raise Exception(
|
||||
f"Query failed with status {response.status_code} {response.reason}"
|
||||
)
|
||||
if response.status_code == 204:
|
||||
results = None
|
||||
else:
|
||||
results = response.json()
|
||||
|
||||
elif query_type == "post":
|
||||
response = self.session.post(
|
||||
self.baseurl + query,
|
||||
headers=self.headers,
|
||||
json=json,
|
||||
timeout=self.timeout,
|
||||
)
|
||||
if response.status_code not in [200, 204]:
|
||||
raise Exception(
|
||||
f"Query failed with status {response.status_code} {response.reason}"
|
||||
)
|
||||
if response.status_code == 204:
|
||||
results = None
|
||||
else:
|
||||
results = response.json()
|
||||
|
||||
if results is not None:
|
||||
if not isinstance(results, list) and not isinstance(results, dict):
|
||||
raise Exception("Query result is not of type list or dict")
|
||||
|
||||
# append identifiers to results
|
||||
if identifiers:
|
||||
results["Identifiers"] = identifiers
|
||||
|
||||
return results
|
||||
|
||||
except Exception as e:
|
||||
logger(
|
||||
f"{self.server_type}: Query {query_type} {query}\nResults {results}\n{e}",
|
||||
2,
|
||||
)
|
||||
raise Exception(e)
|
||||
|
||||
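# Sketch (IDs assumed): the identifiers argument is attached verbatim to the JSON result,
# so callers that issue many queries can tell which library a given response belongs to.
#
#   items = self.query(
#       f"/Users/{user_id}/Items?ParentId={library_id}&Filters=IsPlayed&limit=100",
#       "get",
#       identifiers={"library_id": library_id, "library_title": "Movies"},
#   )
#   items["Identifiers"]["library_title"]  # -> "Movies"
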
def info(self, name_only: bool = False) -> str:
|
||||
try:
|
||||
query_string = "/System/Info/Public"
|
||||
|
||||
response = self.query(query_string, "get")
|
||||
|
||||
if response:
|
||||
if name_only:
|
||||
return f"{response['ServerName']}"
|
||||
return f"{self.server_type} {response['ServerName']}: {response['Version']}"
|
||||
else:
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger(f"{self.server_type}: Get server name failed {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
def get_server_version(self):
|
||||
try:
|
||||
response = self.query("/System/Info/Public", "get")
|
||||
|
||||
if response:
|
||||
return version.parse(response["Version"])
|
||||
else:
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger(f"{self.server_type}: Get server version failed: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
def get_users(self):
|
||||
try:
|
||||
users = {}
|
||||
|
||||
query_string = "/Users"
|
||||
response = self.query(query_string, "get")
|
||||
|
||||
# If response is not empty
|
||||
if response:
|
||||
for user in response:
|
||||
users[user["Name"]] = user["Id"]
|
||||
|
||||
return users
|
||||
except Exception as e:
|
||||
logger(f"{self.server_type}: Get users failed {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
def get_libraries(self):
|
||||
try:
|
||||
libraries = {}
|
||||
|
||||
# There's no way to get all libraries directly, so get the list of libraries from each user individually
|
||||
users = self.get_users()
|
||||
|
||||
for _, user_id in users.items():
|
||||
user_libraries = self.query(f"/Users/{user_id}/Views", "get")
|
||||
for library in user_libraries["Items"]:
|
||||
library_id = library["Id"]
|
||||
library_title = library["Name"]
|
||||
|
||||
# Get library items to check the type
|
||||
media_info = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&Filters=IsPlayed&Recursive=True&excludeItemTypes=Folder&limit=100",
|
||||
"get",
|
||||
)
|
||||
|
||||
types = set(
|
||||
[
|
||||
x["Type"]
|
||||
for x in media_info["Items"]
|
||||
if x["Type"] in ["Movie", "Series", "Episode"]
|
||||
]
|
||||
)
|
||||
all_types = set([x["Type"] for x in media_info["Items"]])
|
||||
|
||||
if not types:
|
||||
logger(
|
||||
f"{self.server_type}: Skipping library {library_title}, no wanted media types found. All types: {all_types}",
|
||||
1,
|
||||
)
|
||||
else:
|
||||
libraries[library_title] = str(types)
|
||||
|
||||
return libraries
|
||||
except Exception as e:
|
||||
logger(f"{self.server_type}: Get libraries failed {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
def get_user_library_watched(
|
||||
self, user_name, user_id, library_type, library_id, library_title
|
||||
):
|
||||
try:
|
||||
user_name = user_name.lower()
|
||||
user_watched = {}
|
||||
|
||||
logger(
|
||||
f"{self.server_type}: Generating watched for {user_name} in library {library_title}",
|
||||
0,
|
||||
)
|
||||
|
||||
# Movies
|
||||
if library_type == "Movie":
|
||||
user_watched[library_title] = []
|
||||
watched = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&Filters=IsPlayed&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||
"get",
|
||||
).get("Items", [])
|
||||
|
||||
in_progress = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&Filters=IsResumable&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||
"get",
|
||||
).get("Items", [])
|
||||
|
||||
for movie in watched + in_progress:
|
||||
# Skip if there's no user data, which means the movie has not been watched
|
||||
if "UserData" not in movie:
|
||||
continue
|
||||
|
||||
# Skip if there's no media tied to the movie
|
||||
if "MediaSources" not in movie or movie["MediaSources"] == {}:
|
||||
continue
|
||||
|
||||
# Only process the movie if it is watched or has been played for more than a minute
|
||||
if (
|
||||
movie["UserData"]["Played"] is True
|
||||
or movie["UserData"]["PlaybackPositionTicks"] > 600000000
|
||||
):
|
||||
logger(
|
||||
f"{self.server_type}: Adding {movie.get('Name')} to {user_name} watched list",
|
||||
3,
|
||||
)
|
||||
|
||||
# Get the movie's GUIDs
|
||||
movie_guids = get_guids(self.server_type, movie)
|
||||
|
||||
# Append the movie dictionary to the list for the given user and library
|
||||
user_watched[library_title].append(movie_guids)
|
||||
logger(
|
||||
f"{self.server_type}: Added {movie_guids} to {user_name} watched list",
|
||||
3,
|
||||
)
|
||||
|
||||
# TV Shows
|
||||
if library_type in ["Series", "Episode"]:
|
||||
# Initialize an empty dictionary for the given user and library
|
||||
user_watched[library_title] = {}
|
||||
|
||||
# Retrieve a list of watched TV shows
|
||||
watched_shows = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&isPlaceHolder=false&IncludeItemTypes=Series&Recursive=True&Fields=ProviderIds,Path,RecursiveItemCount",
|
||||
"get",
|
||||
).get("Items", [])
|
||||
|
||||
# Filter the list of shows to only include those that have been partially or fully watched
|
||||
watched_shows_filtered = []
|
||||
for show in watched_shows:
|
||||
if "UserData" not in show:
|
||||
continue
|
||||
|
||||
if "PlayedPercentage" in show["UserData"]:
|
||||
if show["UserData"]["PlayedPercentage"] > 0:
|
||||
watched_shows_filtered.append(show)
|
||||
|
||||
# Retrieve the watched/partially watched list of episodes of each watched show
|
||||
for show in watched_shows_filtered:
|
||||
logger(
|
||||
f"{self.server_type}: Adding {show.get('Name')} to {user_name} watched list",
|
||||
3,
|
||||
)
|
||||
show_guids = {k.lower(): v for k, v in show["ProviderIds"].items()}
|
||||
show_guids["title"] = show["Name"]
|
||||
show_guids["locations"] = (
|
||||
tuple([show["Path"].split("/")[-1]])
|
||||
if "Path" in show
|
||||
else tuple()
|
||||
)
|
||||
|
||||
show_guids = frozenset(show_guids.items())
|
||||
|
||||
show_episodes = self.query(
|
||||
f"/Shows/{show['Id']}/Episodes"
|
||||
+ f"?userId={user_id}&isPlaceHolder=false&Fields=ProviderIds,MediaSources",
|
||||
"get",
|
||||
).get("Items", [])
|
||||
|
||||
# Iterate through the episodes
|
||||
# Create a list to store the episodes
|
||||
mark_episodes_list = []
|
||||
for episode in show_episodes:
|
||||
if "UserData" not in episode:
|
||||
continue
|
||||
|
||||
if (
|
||||
"MediaSources" not in episode
|
||||
or episode["MediaSources"] == {}
|
||||
):
|
||||
continue
|
||||
|
||||
# If watched or watched more than a minute
|
||||
if (
|
||||
episode["UserData"]["Played"] is True
|
||||
or episode["UserData"]["PlaybackPositionTicks"] > 600000000
|
||||
):
|
||||
episode_guids = get_guids(self.server_type, episode)
|
||||
mark_episodes_list.append(episode_guids)
|
||||
|
||||
if mark_episodes_list:
|
||||
# Add the show dictionary to the user's watched list
|
||||
if show_guids not in user_watched[library_title]:
|
||||
user_watched[library_title][show_guids] = []
|
||||
|
||||
user_watched[library_title][show_guids] = mark_episodes_list
|
||||
for episode in mark_episodes_list:
|
||||
logger(
|
||||
f"{self.server_type}: Added {episode} to {user_name} watched list",
|
||||
3,
|
||||
)
|
||||
|
||||
logger(
|
||||
f"{self.server_type}: Got watched for {user_name} in library {library_title}",
|
||||
1,
|
||||
)
|
||||
if library_title in user_watched:
|
||||
logger(f"{self.server_type}: {user_watched[library_title]}", 3)
|
||||
|
||||
return user_watched
|
||||
except Exception as e:
|
||||
logger(
|
||||
f"{self.server_type}: Failed to get watched for {user_name} in library {library_title}, Error: {e}",
|
||||
2,
|
||||
)
|
||||
|
||||
logger(traceback.format_exc(), 2)
|
||||
return {}
|
||||
|
||||
def get_watched(self, users, sync_libraries):
|
||||
try:
|
||||
users_watched = {}
|
||||
watched = []
|
||||
|
||||
for user_name, user_id in users.items():
|
||||
libraries = []
|
||||
|
||||
all_libraries = self.query(f"/Users/{user_id}/Views", "get")
|
||||
for library in all_libraries["Items"]:
|
||||
library_id = library["Id"]
|
||||
library_title = library["Name"]
|
||||
|
||||
if library_title not in sync_libraries:
|
||||
continue
|
||||
|
||||
identifiers = {
|
||||
"library_id": library_id,
|
||||
"library_title": library_title,
|
||||
}
|
||||
libraries.append(
|
||||
self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&Filters=IsPlayed&Recursive=True&excludeItemTypes=Folder&limit=100",
|
||||
"get",
|
||||
identifiers=identifiers,
|
||||
)
|
||||
)
|
||||
|
||||
for library in libraries:
|
||||
if len(library["Items"]) == 0:
|
||||
continue
|
||||
|
||||
library_id = library["Identifiers"]["library_id"]
|
||||
library_title = library["Identifiers"]["library_title"]
|
||||
|
||||
# Get all library types excluding "Folder"
|
||||
types = set(
|
||||
[
|
||||
x["Type"]
|
||||
for x in library["Items"]
|
||||
if x["Type"] in ["Movie", "Series", "Episode"]
|
||||
]
|
||||
)
|
||||
|
||||
for library_type in types:
|
||||
# Get watched for user
|
||||
watched = self.get_user_library_watched(
|
||||
user_name,
|
||||
user_id,
|
||||
library_type,
|
||||
library_id,
|
||||
library_title,
|
||||
)
|
||||
|
||||
if user_name.lower() not in users_watched:
|
||||
users_watched[user_name.lower()] = {}
|
||||
users_watched[user_name.lower()].update(watched)
|
||||
|
||||
return users_watched
|
||||
except Exception as e:
|
||||
logger(f"{self.server_type}: Failed to get watched, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
def update_user_watched(
|
||||
self, user_name, user_id, library, library_id, videos, update_partial, dryrun
|
||||
):
|
||||
try:
|
||||
logger(
|
||||
f"{self.server_type}: Updating watched for {user_name} in library {library}",
|
||||
1,
|
||||
)
|
||||
(
|
||||
videos_shows_ids,
|
||||
videos_episodes_ids,
|
||||
videos_movies_ids,
|
||||
) = generate_library_guids_dict(videos)
|
||||
|
||||
if (
|
||||
not videos_movies_ids
|
||||
and not videos_shows_ids
|
||||
and not videos_episodes_ids
|
||||
):
|
||||
logger(
|
||||
f"{self.server_type}: No videos to mark as watched for {user_name} in library {library}",
|
||||
1,
|
||||
)
|
||||
|
||||
return
|
||||
|
||||
logger(
|
||||
f"{self.server_type}: mark list\nShows: {videos_shows_ids}\nEpisodes: {videos_episodes_ids}\nMovies: {videos_movies_ids}",
|
||||
1,
|
||||
)
|
||||
|
||||
if videos_movies_ids:
|
||||
jellyfin_search = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=True&ParentId={library_id}"
|
||||
+ "&isPlayed=false&Fields=ItemCounts,ProviderIds,MediaSources&IncludeItemTypes=Movie",
|
||||
"get",
|
||||
)
|
||||
for jellyfin_video in jellyfin_search["Items"]:
|
||||
movie_status = get_video_status(
|
||||
jellyfin_video, videos_movies_ids, videos
|
||||
)
|
||||
|
||||
if movie_status:
|
||||
jellyfin_video_id = jellyfin_video["Id"]
|
||||
if movie_status["completed"]:
|
||||
msg = f"{self.server_type}: {jellyfin_video.get('Name')} as watched for {user_name} in {library}"
|
||||
if not dryrun:
|
||||
logger(msg, 5)
|
||||
self.query(
|
||||
f"/Users/{user_id}/PlayedItems/{jellyfin_video_id}",
|
||||
"post",
|
||||
)
|
||||
else:
|
||||
logger(msg, 6)
|
||||
|
||||
log_marked(
|
||||
self.server_type,
|
||||
self.server_name,
|
||||
user_name,
|
||||
library,
|
||||
jellyfin_video.get("Name"),
|
||||
)
|
||||
elif update_partial:
|
||||
msg = f"{self.server_type}: {jellyfin_video.get('Name')} as partially watched for {floor(movie_status['time'] / 60_000)} minutes for {user_name} in {library}"
|
||||
|
||||
if not dryrun:
|
||||
logger(msg, 5)
|
||||
playback_position_payload = {
|
||||
"PlaybackPositionTicks": movie_status["time"]
|
||||
* 10_000,
|
||||
}
|
||||
self.query(
|
||||
f"/Users/{user_id}/Items/{jellyfin_video_id}/UserData",
|
||||
"post",
|
||||
json=playback_position_payload,
|
||||
)
|
||||
else:
|
||||
logger(msg, 6)
|
||||
|
||||
log_marked(
|
||||
self.server_type,
|
||||
self.server_name,
|
||||
user_name,
|
||||
library,
|
||||
jellyfin_video.get("Name"),
|
||||
duration=floor(movie_status["time"] / 60_000),
|
||||
)
|
||||
else:
|
||||
logger(
|
||||
f"{self.server_type}: Skipping movie {jellyfin_video.get('Name')} as it is not in mark list for {user_name}",
|
||||
3,
|
||||
)
|
||||
|
||||
# TV Shows
|
||||
if videos_shows_ids and videos_episodes_ids:
|
||||
jellyfin_search = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=True&ParentId={library_id}"
|
||||
+ "&Fields=ItemCounts,ProviderIds,Path&IncludeItemTypes=Series",
|
||||
"get",
|
||||
)
|
||||
jellyfin_shows = [x for x in jellyfin_search["Items"]]
|
||||
|
||||
for jellyfin_show in jellyfin_shows:
|
||||
show_found = False
|
||||
episode_videos = []
|
||||
|
||||
if generate_locations:
|
||||
if "Path" in jellyfin_show:
|
||||
if (
|
||||
contains_nested(
|
||||
jellyfin_show["Path"].split("/")[-1],
|
||||
videos_shows_ids["locations"],
|
||||
)
|
||||
is not None
|
||||
):
|
||||
show_found = True
|
||||
for shows, episodes in videos.items():
|
||||
show = {k: v for k, v in shows}
|
||||
if (
|
||||
contains_nested(
|
||||
jellyfin_show["Path"].split("/")[-1],
|
||||
show["locations"],
|
||||
)
|
||||
is not None
|
||||
):
|
||||
for episode in episodes:
|
||||
episode_videos.append(episode)
|
||||
|
||||
break
|
||||
|
||||
if generate_guids:
|
||||
if not show_found:
|
||||
for show_provider_source, show_provider_id in jellyfin_show[
|
||||
"ProviderIds"
|
||||
].items():
|
||||
if show_provider_source.lower() in videos_shows_ids:
|
||||
if (
|
||||
show_provider_id.lower()
|
||||
in videos_shows_ids[
|
||||
show_provider_source.lower()
|
||||
]
|
||||
):
|
||||
show_found = True
|
||||
for show, episodes in videos.items():
|
||||
show = {k: v for k, v in show}
|
||||
if show_provider_id.lower() in show.get(
|
||||
show_provider_source.lower(), []
|
||||
):
|
||||
for episode in episodes:
|
||||
episode_videos.append(episode)
|
||||
|
||||
break
|
||||
|
||||
if show_found:
|
||||
logger(
|
||||
f"{self.server_type}: Updating watched for {user_name} in library {library} for show {jellyfin_show.get('Name')}",
|
||||
1,
|
||||
)
|
||||
jellyfin_show_id = jellyfin_show["Id"]
|
||||
jellyfin_episodes = self.query(
|
||||
f"/Shows/{jellyfin_show_id}/Episodes"
|
||||
+ f"?userId={user_id}&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||
"get",
|
||||
)
|
||||
|
||||
for jellyfin_episode in jellyfin_episodes["Items"]:
|
||||
episode_status = get_video_status(
|
||||
jellyfin_episode, videos_episodes_ids, episode_videos
|
||||
)
|
||||
|
||||
if episode_status:
|
||||
jellyfin_episode_id = jellyfin_episode["Id"]
|
||||
if episode_status["completed"]:
|
||||
msg = (
|
||||
f"{self.server_type}: {jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode.get('IndexNumber')} {jellyfin_episode.get('Name')}"
|
||||
+ f" as watched for {user_name} in {library}"
|
||||
)
|
||||
if not dryrun:
|
||||
logger(msg, 5)
|
||||
self.query(
|
||||
f"/Users/{user_id}/PlayedItems/{jellyfin_episode_id}",
|
||||
"post",
|
||||
)
|
||||
else:
|
||||
logger(msg, 6)
|
||||
|
||||
log_marked(
|
||||
self.server_type,
|
||||
self.server_name,
|
||||
user_name,
|
||||
library,
|
||||
jellyfin_episode.get("SeriesName"),
|
||||
jellyfin_episode.get("Name"),
|
||||
)
|
||||
elif update_partial:
|
||||
msg = (
|
||||
f"{self.server_type}: {jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode.get('IndexNumber')} {jellyfin_episode.get('Name')}"
|
||||
+ f" as partially watched for {floor(episode_status['time'] / 60_000)} minutes for {user_name} in {library}"
|
||||
)
|
||||
|
||||
if not dryrun:
|
||||
logger(msg, 5)
|
||||
playback_position_payload = {
|
||||
"PlaybackPositionTicks": episode_status[
|
||||
"time"
|
||||
]
|
||||
* 10_000,
|
||||
}
|
||||
self.query(
|
||||
f"/Users/{user_id}/Items/{jellyfin_episode_id}/UserData",
|
||||
"post",
|
||||
json=playback_position_payload,
|
||||
)
|
||||
else:
|
||||
logger(msg, 6)
|
||||
|
||||
log_marked(
|
||||
self.server_type,
|
||||
self.server_name,
|
||||
user_name,
|
||||
library,
|
||||
jellyfin_episode.get("SeriesName"),
|
||||
jellyfin_episode.get("Name"),
|
||||
duration=floor(episode_status["time"] / 60_000),
|
||||
)
|
||||
else:
|
||||
logger(
|
||||
f"{self.server_type}: Skipping episode {jellyfin_episode.get('Name')} as it is not in mark list for {user_name}",
|
||||
3,
|
||||
)
|
||||
else:
|
||||
logger(
|
||||
f"{self.server_type}: Skipping show {jellyfin_show.get('Name')} as it is not in mark list for {user_name}",
|
||||
3,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger(
|
||||
f"{self.server_type}: Error updating watched for {user_name} in library {library}, {e}",
|
||||
2,
|
||||
)
|
||||
logger(traceback.format_exc(), 2)
|
||||
raise Exception(e)
|
||||
|
||||
def update_watched(
|
||||
self, watched_list, user_mapping=None, library_mapping=None, dryrun=False
|
||||
):
|
||||
try:
|
||||
server_version = self.get_server_version()
|
||||
update_partial = self.is_partial_update_supported(server_version)
|
||||
|
||||
if not update_partial:
|
||||
logger(
|
||||
f"{self.server_type}: Server version {server_version} does not support updating playback position.",
|
||||
2,
|
||||
)
|
||||
|
||||
for user, libraries in watched_list.items():
|
||||
logger(f"{self.server_type}: Updating for entry {user}, {libraries}", 1)
|
||||
user_other = None
|
||||
user_name = None
|
||||
if user_mapping:
|
||||
if user in user_mapping.keys():
|
||||
user_other = user_mapping[user]
|
||||
elif user in user_mapping.values():
|
||||
user_other = search_mapping(user_mapping, user)
|
||||
|
||||
user_id = None
|
||||
for key in self.users:
|
||||
if user.lower() == key.lower():
|
||||
user_id = self.users[key]
|
||||
user_name = key
|
||||
break
|
||||
elif user_other and user_other.lower() == key.lower():
|
||||
user_id = self.users[key]
|
||||
user_name = key
|
||||
break
|
||||
|
||||
if not user_id:
|
||||
logger(f"{user} {user_other} not found in Jellyfin", 2)
|
||||
continue
|
||||
|
||||
jellyfin_libraries = self.query(
|
||||
f"/Users/{user_id}/Views",
|
||||
"get",
|
||||
)
|
||||
jellyfin_libraries = [x for x in jellyfin_libraries["Items"]]
|
||||
|
||||
for library, videos in libraries.items():
|
||||
library_other = None
|
||||
if library_mapping:
|
||||
if library in library_mapping.keys():
|
||||
library_other = library_mapping[library]
|
||||
elif library in library_mapping.values():
|
||||
library_other = search_mapping(library_mapping, library)
|
||||
|
||||
if library.lower() not in [
|
||||
x["Name"].lower() for x in jellyfin_libraries
|
||||
]:
|
||||
if library_other:
|
||||
if library_other.lower() in [
|
||||
x["Name"].lower() for x in jellyfin_libraries
|
||||
]:
|
||||
logger(
|
||||
f"{self.server_type}: Library {library} not found, but {library_other} found, using {library_other}",
|
||||
1,
|
||||
)
|
||||
library = library_other
|
||||
else:
|
||||
logger(
|
||||
f"{self.server_type}: Library {library} or {library_other} not found in library list",
|
||||
1,
|
||||
)
|
||||
continue
|
||||
else:
|
||||
logger(
|
||||
f"{self.server_type}: Library {library} not found in library list",
|
||||
1,
|
||||
)
|
||||
continue
|
||||
|
||||
library_id = None
|
||||
for jellyfin_library in jellyfin_libraries:
|
||||
if jellyfin_library["Name"] == library:
|
||||
library_id = jellyfin_library["Id"]
|
||||
continue
|
||||
|
||||
if library_id:
|
||||
self.update_user_watched(
|
||||
user_name,
|
||||
user_id,
|
||||
library,
|
||||
library_id,
|
||||
videos,
|
||||
update_partial,
|
||||
dryrun,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger(f"{self.server_type}: Error updating watched, {e}", 2)
|
||||
raise Exception(e)
|
||||
337  src/library.py  Normal file
@@ -0,0 +1,337 @@
|
||||
from src.functions import (
|
||||
logger,
|
||||
match_list,
|
||||
search_mapping,
|
||||
)
|
||||
|
||||
|
||||
def check_skip_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping=None,
|
||||
):
|
||||
skip_reason = None
|
||||
library_other = None
|
||||
if library_mapping:
|
||||
library_other = search_mapping(library_mapping, library_title)
|
||||
|
||||
skip_reason_black = check_blacklist_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
blacklist_library,
|
||||
blacklist_library_type,
|
||||
library_other,
|
||||
)
|
||||
skip_reason_white = check_whitelist_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
whitelist_library,
|
||||
whitelist_library_type,
|
||||
library_other,
|
||||
)
|
||||
|
||||
# Combine skip reasons
|
||||
if skip_reason_black:
|
||||
skip_reason = skip_reason_black
|
||||
|
||||
if skip_reason_white:
|
||||
if skip_reason:
|
||||
skip_reason = skip_reason + " and " + skip_reason_white
|
||||
else:
|
||||
skip_reason = skip_reason_white
|
||||
|
||||
return skip_reason
|
||||
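# Illustrative outcome (inputs assumed): a "Movies" library whose type is blacklisted
# and whose title is missing from a non-empty whitelist yields a combined skip reason.
#
#   check_skip_logic(
#       "Movies", "movie",
#       blacklist_library=[], whitelist_library=["TV Shows"],
#       blacklist_library_type=["movie"], whitelist_library_type=[],
#   )
#   # -> "movie is in blacklist_library_type and Movies is not in whitelist_library"
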
|
||||
|
||||
def check_blacklist_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
blacklist_library,
|
||||
blacklist_library_type,
|
||||
library_other=None,
|
||||
):
|
||||
skip_reason = None
|
||||
if isinstance(library_type, (list, tuple, set)):
|
||||
for library_type_item in library_type:
|
||||
if library_type_item.lower() in blacklist_library_type:
|
||||
skip_reason = f"{library_type_item} is in blacklist_library_type"
|
||||
else:
|
||||
if library_type.lower() in blacklist_library_type:
|
||||
skip_reason = f"{library_type} is in blacklist_library_type"
|
||||
|
||||
if library_title.lower() in [x.lower() for x in blacklist_library]:
|
||||
if skip_reason:
|
||||
skip_reason = (
|
||||
skip_reason + " and " + f"{library_title} is in blacklist_library"
|
||||
)
|
||||
else:
|
||||
skip_reason = f"{library_title} is in blacklist_library"
|
||||
|
||||
if library_other:
|
||||
if library_other.lower() in [x.lower() for x in blacklist_library]:
|
||||
if skip_reason:
|
||||
skip_reason = (
|
||||
skip_reason + " and " + f"{library_other} is in blacklist_library"
|
||||
)
|
||||
else:
|
||||
skip_reason = f"{library_other} is in blacklist_library"
|
||||
|
||||
return skip_reason
|
||||
|
||||
|
||||
def check_whitelist_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
whitelist_library,
|
||||
whitelist_library_type,
|
||||
library_other=None,
|
||||
):
|
||||
skip_reason = None
|
||||
if len(whitelist_library_type) > 0:
|
||||
if isinstance(library_type, (list, tuple, set)):
|
||||
for library_type_item in library_type:
|
||||
if library_type_item.lower() not in whitelist_library_type:
|
||||
skip_reason = (
|
||||
f"{library_type_item} is not in whitelist_library_type"
|
||||
)
|
||||
else:
|
||||
if library_type.lower() not in whitelist_library_type:
|
||||
skip_reason = f"{library_type} is not in whitelist_library_type"
|
||||
|
||||
# if whitelist is not empty and library is not in whitelist
|
||||
if len(whitelist_library) > 0:
|
||||
if library_other:
|
||||
if library_title.lower() not in [
|
||||
x.lower() for x in whitelist_library
|
||||
] and library_other.lower() not in [x.lower() for x in whitelist_library]:
|
||||
if skip_reason:
|
||||
skip_reason = (
|
||||
skip_reason
|
||||
+ " and "
|
||||
+ f"{library_title} is not in whitelist_library"
|
||||
)
|
||||
else:
|
||||
skip_reason = f"{library_title} is not in whitelist_library"
|
||||
else:
|
||||
if library_title.lower() not in [x.lower() for x in whitelist_library]:
|
||||
if skip_reason:
|
||||
skip_reason = (
|
||||
skip_reason
|
||||
+ " and "
|
||||
+ f"{library_title} is not in whitelist_library"
|
||||
)
|
||||
else:
|
||||
skip_reason = f"{library_title} is not in whitelist_library"
|
||||
|
||||
return skip_reason
|
||||
|
||||
|
||||
def filter_libaries(
|
||||
server_libraries,
|
||||
blacklist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library,
|
||||
whitelist_library_type,
|
||||
library_mapping=None,
|
||||
):
|
||||
filtered_libaries = []
|
||||
for library in server_libraries:
|
||||
skip_reason = check_skip_logic(
|
||||
library,
|
||||
server_libraries[library],
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
)
|
||||
|
||||
if skip_reason:
|
||||
logger(f"Skipping library {library}: {skip_reason}", 1)
|
||||
continue
|
||||
|
||||
filtered_libaries.append(library)
|
||||
|
||||
return filtered_libaries
|
||||
|
||||
|
||||
def setup_libraries(
|
||||
server_1,
|
||||
server_2,
|
||||
blacklist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library,
|
||||
whitelist_library_type,
|
||||
library_mapping=None,
|
||||
):
|
||||
server_1_libraries = server_1.get_libraries()
|
||||
server_2_libraries = server_2.get_libraries()
|
||||
logger(f"Server 1 libraries: {server_1_libraries}", 1)
|
||||
logger(f"Server 2 libraries: {server_2_libraries}", 1)
|
||||
|
||||
# Filter out libraries according to the blacklist and whitelist settings
|
||||
filtered_server_1_libraries = filter_libaries(
|
||||
server_1_libraries,
|
||||
blacklist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
)
|
||||
filtered_server_2_libraries = filter_libaries(
|
||||
server_2_libraries,
|
||||
blacklist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
)
|
||||
|
||||
output_server_1_libaries = match_list(
|
||||
filtered_server_1_libraries, filtered_server_2_libraries, library_mapping
|
||||
)
|
||||
output_server_2_libaries = match_list(
|
||||
filtered_server_2_libraries, filtered_server_1_libraries, library_mapping
|
||||
)
|
||||
|
||||
return output_server_1_libaries, output_server_2_libaries
|
||||
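# Sketch of the expected call from main.py (server objects and list variables are
# assumptions): both servers' libraries are filtered against the black/white lists and
# then matched against each other, so presumably only libraries present on both sides
# (directly or via library_mapping) are synced.
#
#   server_1_libraries, server_2_libraries = setup_libraries(
#       plex, jellyfin,
#       blacklist_library, blacklist_library_type,
#       whitelist_library, whitelist_library_type,
#       library_mapping,
#   )
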
|
||||
|
||||
def show_title_dict(user_list: dict):
|
||||
try:
|
||||
show_output_dict = {}
|
||||
show_output_dict["locations"] = []
|
||||
show_counter = 0 # Initialize a counter for the current show position
|
||||
|
||||
show_output_keys = user_list.keys()
|
||||
show_output_keys = [dict(x) for x in list(show_output_keys)]
|
||||
for show_key in show_output_keys:
|
||||
for provider_key, provider_value in show_key.items():
|
||||
# Skip title
|
||||
if provider_key.lower() == "title":
|
||||
continue
|
||||
if provider_key.lower() not in show_output_dict:
|
||||
show_output_dict[provider_key.lower()] = [None] * show_counter
|
||||
if provider_key.lower() == "locations":
|
||||
show_output_dict[provider_key.lower()].append(provider_value)
|
||||
else:
|
||||
show_output_dict[provider_key.lower()].append(
|
||||
provider_value.lower()
|
||||
)
|
||||
|
||||
show_counter += 1
|
||||
for key in show_output_dict:
|
||||
if len(show_output_dict[key]) < show_counter:
|
||||
show_output_dict[key].append(None)
|
||||
|
||||
return show_output_dict
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
|
||||
def episode_title_dict(user_list: dict):
|
||||
try:
|
||||
episode_output_dict = {}
|
||||
episode_output_dict["completed"] = []
|
||||
episode_output_dict["time"] = []
|
||||
episode_output_dict["locations"] = []
|
||||
episode_output_dict["show"] = []
|
||||
episode_counter = 0 # Initialize a counter for the current episode position
|
||||
|
||||
# Iterate through the shows and episodes in user_list
|
||||
for show in user_list:
|
||||
|
||||
for episode in user_list[show]:
|
||||
# Add the show title to the episode_output_dict if it doesn't exist
|
||||
if "show" not in episode_output_dict:
|
||||
episode_output_dict["show"] = [None] * episode_counter
|
||||
|
||||
# Add the show title to the episode_output_dict
|
||||
episode_output_dict["show"].append(dict(show))
|
||||
|
||||
# Iterate through the keys and values in each episode
|
||||
for episode_key, episode_value in episode.items():
|
||||
# If the key is not "status", add the key to episode_output_dict if it doesn't exist
|
||||
if episode_key != "status":
|
||||
if episode_key.lower() not in episode_output_dict:
|
||||
# Initialize the list with None values up to the current episode position
|
||||
episode_output_dict[episode_key.lower()] = [
|
||||
None
|
||||
] * episode_counter
|
||||
|
||||
# If the key is "locations", append each location to the list
|
||||
if episode_key == "locations":
|
||||
episode_output_dict[episode_key.lower()].append(episode_value)
|
||||
|
||||
# If the key is "status", append the "completed" and "time" values
|
||||
elif episode_key == "status":
|
||||
episode_output_dict["completed"].append(
|
||||
episode_value["completed"]
|
||||
)
|
||||
episode_output_dict["time"].append(episode_value["time"])
|
||||
|
||||
# For other keys, append the value to the list
|
||||
else:
|
||||
episode_output_dict[episode_key.lower()].append(
|
||||
episode_value.lower()
|
||||
)
|
||||
|
||||
# Increment the episode_counter
|
||||
episode_counter += 1
|
||||
|
||||
# Extend the lists in episode_output_dict with None values to match the current episode_counter
|
||||
for key in episode_output_dict:
|
||||
if len(episode_output_dict[key]) < episode_counter:
|
||||
episode_output_dict[key].append(None)
|
||||
|
||||
return episode_output_dict
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
|
||||
def movies_title_dict(user_list: dict):
|
||||
try:
|
||||
movies_output_dict = {}
|
||||
movies_output_dict["completed"] = []
|
||||
movies_output_dict["time"] = []
|
||||
movies_output_dict["locations"] = []
|
||||
movie_counter = 0 # Initialize a counter for the current movie position
|
||||
|
||||
for movie in user_list:
|
||||
for movie_key, movie_value in movie.items():
|
||||
if movie_key != "status":
|
||||
if movie_key.lower() not in movies_output_dict:
|
||||
movies_output_dict[movie_key.lower()] = []
|
||||
|
||||
if movie_key == "locations":
|
||||
movies_output_dict[movie_key.lower()].append(movie_value)
|
||||
elif movie_key == "status":
|
||||
movies_output_dict["completed"].append(movie_value["completed"])
|
||||
movies_output_dict["time"].append(movie_value["time"])
|
||||
else:
|
||||
movies_output_dict[movie_key.lower()].append(movie_value.lower())
|
||||
|
||||
movie_counter += 1
|
||||
for key in movies_output_dict:
|
||||
if len(movies_output_dict[key]) < movie_counter:
|
||||
movies_output_dict[key].append(None)
|
||||
|
||||
return movies_output_dict
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
|
||||
def generate_library_guids_dict(user_list: dict):
|
||||
# Handle the case where user_list is empty or does not contain the expected keys and values
|
||||
if not user_list:
|
||||
return {}, {}, {}
|
||||
|
||||
show_output_dict = show_title_dict(user_list)
|
||||
episode_output_dict = episode_title_dict(user_list)
|
||||
movies_output_dict = movies_title_dict(user_list)
|
||||
|
||||
return show_output_dict, episode_output_dict, movies_output_dict
|
||||
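As a rough illustration (values assumed, not taken from the diff), generate_library_guids_dict turns a watched movie list into column-style lookup dicts; for a movie library the show and episode dicts come back empty:

movies = [
    {"title": "Example Movie", "imdb": "tt0000000",
     "locations": ("Example.Movie.2020.mkv",),
     "status": {"completed": True, "time": 0}},
]
shows_ids, episodes_ids, movies_ids = generate_library_guids_dict(movies)
# movies_ids -> {"completed": [True], "time": [0],
#                "locations": [("Example.Movie.2020.mkv",)],
#                "title": ["example movie"], "imdb": ["tt0000000"]}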
618  src/main.py
@@ -1,520 +1,92 @@
|
||||
import copy, os, traceback, json, asyncio
|
||||
import os, traceback, json
|
||||
from dotenv import load_dotenv
|
||||
from time import sleep, perf_counter
|
||||
|
||||
from src.library import setup_libraries
|
||||
from src.functions import (
|
||||
logger,
|
||||
str_to_bool,
|
||||
search_mapping,
|
||||
generate_library_guids_dict,
|
||||
)
|
||||
from src.plex import Plex
|
||||
from src.jellyfin import Jellyfin
|
||||
from src.users import setup_users
|
||||
from src.watched import (
|
||||
cleanup_watched,
|
||||
)
|
||||
from src.black_white import setup_black_white_lists
|
||||
from src.connection import generate_server_connections
|
||||
|
||||
load_dotenv(override=True)
|
||||
|
||||
|
||||
def cleanup_watched(
|
||||
watched_list_1, watched_list_2, user_mapping=None, library_mapping=None
|
||||
):
|
||||
modified_watched_list_1 = copy.deepcopy(watched_list_1)
|
||||
def should_sync_server(server_1_type, server_2_type):
|
||||
sync_from_plex_to_jellyfin = str_to_bool(
|
||||
os.getenv("SYNC_FROM_PLEX_TO_JELLYFIN", "True")
|
||||
)
|
||||
sync_from_plex_to_plex = str_to_bool(os.getenv("SYNC_FROM_PLEX_TO_PLEX", "True"))
|
||||
sync_from_plex_to_emby = str_to_bool(os.getenv("SYNC_FROM_PLEX_TO_EMBY", "True"))
|
||||
|
||||
# remove entries from plex_watched that are in jellyfin_watched
|
||||
for user_1 in watched_list_1:
|
||||
user_other = None
|
||||
if user_mapping:
|
||||
user_other = search_mapping(user_mapping, user_1)
|
||||
if user_1 in modified_watched_list_1:
|
||||
if user_1 in watched_list_2:
|
||||
user_2 = user_1
|
||||
elif user_other in watched_list_2:
|
||||
user_2 = user_other
|
||||
else:
|
||||
logger(f"User {user_1} and {user_other} not found in watched list 2", 1)
|
||||
continue
|
||||
|
||||
for library_1 in watched_list_1[user_1]:
|
||||
library_other = None
|
||||
if library_mapping:
|
||||
library_other = search_mapping(library_mapping, library_1)
|
||||
if library_1 in modified_watched_list_1[user_1]:
|
||||
if library_1 in watched_list_2[user_2]:
|
||||
library_2 = library_1
|
||||
elif library_other in watched_list_2[user_2]:
|
||||
library_2 = library_other
|
||||
else:
|
||||
logger(
|
||||
f"library {library_1} and {library_other} not found in watched list 2",
|
||||
1,
|
||||
)
|
||||
continue
|
||||
|
||||
(
|
||||
_,
|
||||
episode_watched_list_2_keys_dict,
|
||||
movies_watched_list_2_keys_dict,
|
||||
) = generate_library_guids_dict(watched_list_2[user_2][library_2])
|
||||
|
||||
# Movies
|
||||
if isinstance(watched_list_1[user_1][library_1], list):
|
||||
for movie in watched_list_1[user_1][library_1]:
|
||||
movie_found = False
|
||||
for movie_key, movie_value in movie.items():
|
||||
if movie_key == "locations":
|
||||
if (
|
||||
"locations"
|
||||
in movies_watched_list_2_keys_dict.keys()
|
||||
):
|
||||
for location in movie_value:
|
||||
if (
|
||||
location
|
||||
in movies_watched_list_2_keys_dict[
|
||||
"locations"
|
||||
]
|
||||
):
|
||||
movie_found = True
|
||||
break
|
||||
else:
|
||||
if (
|
||||
movie_key
|
||||
in movies_watched_list_2_keys_dict.keys()
|
||||
):
|
||||
if (
|
||||
movie_value
|
||||
in movies_watched_list_2_keys_dict[
|
||||
movie_key
|
||||
]
|
||||
):
|
||||
movie_found = True
|
||||
|
||||
if movie_found:
|
||||
logger(f"Removing {movie} from {library_1}", 3)
|
||||
modified_watched_list_1[user_1][library_1].remove(
|
||||
movie
|
||||
)
|
||||
break
|
||||
|
||||
# TV Shows
|
||||
elif isinstance(watched_list_1[user_1][library_1], dict):
|
||||
# Generate full list of provider ids for episodes in watch_list_2 to easily compare if they exist in watch_list_1
|
||||
|
||||
for show_key_1 in watched_list_1[user_1][library_1].keys():
|
||||
show_key_dict = dict(show_key_1)
|
||||
for season in watched_list_1[user_1][library_1][show_key_1]:
|
||||
for episode in watched_list_1[user_1][library_1][
|
||||
show_key_1
|
||||
][season]:
|
||||
episode_found = False
|
||||
for episode_key, episode_value in episode.items():
|
||||
# If episode_key and episode_value are in episode_watched_list_2_keys_dict exactly, then remove from watch_list_1
|
||||
if episode_key == "locations":
|
||||
if (
|
||||
"locations"
|
||||
in episode_watched_list_2_keys_dict.keys()
|
||||
):
|
||||
for location in episode_value:
|
||||
if (
|
||||
location
|
||||
in episode_watched_list_2_keys_dict[
|
||||
"locations"
|
||||
]
|
||||
):
|
||||
episode_found = True
|
||||
break
|
||||
|
||||
else:
|
||||
if (
|
||||
episode_key
|
||||
in episode_watched_list_2_keys_dict.keys()
|
||||
):
|
||||
if (
|
||||
episode_value
|
||||
in episode_watched_list_2_keys_dict[
|
||||
episode_key
|
||||
]
|
||||
):
|
||||
episode_found = True
|
||||
|
||||
if episode_found:
|
||||
if (
|
||||
episode
|
||||
in modified_watched_list_1[user_1][
|
||||
library_1
|
||||
][show_key_1][season]
|
||||
):
|
||||
logger(
|
||||
f"Removing {episode} from {show_key_dict['title']}",
|
||||
3,
|
||||
)
|
||||
modified_watched_list_1[user_1][
|
||||
library_1
|
||||
][show_key_1][season].remove(episode)
|
||||
break
|
||||
|
||||
# Remove empty seasons
|
||||
if (
|
||||
len(
|
||||
modified_watched_list_1[user_1][library_1][
|
||||
show_key_1
|
||||
][season]
|
||||
)
|
||||
== 0
|
||||
):
|
||||
if (
|
||||
season
|
||||
in modified_watched_list_1[user_1][library_1][
|
||||
show_key_1
|
||||
]
|
||||
):
|
||||
logger(
|
||||
f"Removing {season} from {show_key_dict['title']} because it is empty",
|
||||
3,
|
||||
)
|
||||
del modified_watched_list_1[user_1][library_1][
|
||||
show_key_1
|
||||
][season]
|
||||
|
||||
# If the show is empty, remove the show
|
||||
if (
|
||||
len(
|
||||
modified_watched_list_1[user_1][library_1][
|
||||
show_key_1
|
||||
]
|
||||
)
|
||||
== 0
|
||||
):
|
||||
if (
|
||||
show_key_1
|
||||
in modified_watched_list_1[user_1][library_1]
|
||||
):
|
||||
logger(
|
||||
f"Removing {show_key_dict['title']} from {library_1} because it is empty",
|
||||
1,
|
||||
)
|
||||
del modified_watched_list_1[user_1][library_1][
|
||||
show_key_1
|
||||
]
|
||||
|
||||
for user_1 in watched_list_1:
|
||||
for library_1 in watched_list_1[user_1]:
|
||||
if library_1 in modified_watched_list_1[user_1]:
|
||||
# If library is empty then remove it
|
||||
if len(modified_watched_list_1[user_1][library_1]) == 0:
|
||||
logger(f"Removing {library_1} from {user_1} because it is empty", 1)
|
||||
del modified_watched_list_1[user_1][library_1]
|
||||
|
||||
if user_1 in modified_watched_list_1:
|
||||
# If user is empty delete user
|
||||
if len(modified_watched_list_1[user_1]) == 0:
|
||||
logger(f"Removing {user_1} from watched list 1 because it is empty", 1)
|
||||
del modified_watched_list_1[user_1]
|
||||
|
||||
return modified_watched_list_1
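# Note: search_mapping is imported from src.functions, which is not part of this
# diff. Judging from the calls above (looking a user or library name up in the
# mapping in either direction), a minimal hypothetical sketch of the behaviour
# these calls assume might look like this; the project's real helper may differ.
def search_mapping(mapping, value):
    # Direct hit: value is a key in the mapping
    if value in mapping:
        return mapping[value]
    # Reverse hit: value is one of the mapped names
    for key, mapped in mapping.items():
        if mapped == value:
            return key
    # Unmapped names return None so callers can fall back to the original name
    return None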
|
||||
|
||||
|
||||
def setup_black_white_lists(
|
||||
blacklist_library: str,
|
||||
whitelist_library: str,
|
||||
blacklist_library_type: str,
|
||||
whitelist_library_type: str,
|
||||
blacklist_users: str,
|
||||
whitelist_users: str,
|
||||
library_mapping=None,
|
||||
user_mapping=None,
|
||||
):
|
||||
if blacklist_library:
|
||||
if len(blacklist_library) > 0:
|
||||
blacklist_library = blacklist_library.split(",")
|
||||
blacklist_library = [x.strip() for x in blacklist_library]
|
||||
if library_mapping:
|
||||
temp_library = []
|
||||
for library in blacklist_library:
|
||||
library_other = search_mapping(library_mapping, library)
|
||||
if library_other:
|
||||
temp_library.append(library_other)
|
||||
|
||||
blacklist_library = blacklist_library + temp_library
|
||||
else:
|
||||
blacklist_library = []
|
||||
logger(f"Blacklist Library: {blacklist_library}", 1)
|
||||
|
||||
if whitelist_library:
|
||||
if len(whitelist_library) > 0:
|
||||
whitelist_library = whitelist_library.split(",")
|
||||
whitelist_library = [x.strip() for x in whitelist_library]
|
||||
if library_mapping:
|
||||
temp_library = []
|
||||
for library in whitelist_library:
|
||||
library_other = search_mapping(library_mapping, library)
|
||||
if library_other:
|
||||
temp_library.append(library_other)
|
||||
|
||||
whitelist_library = whitelist_library + temp_library
|
||||
else:
|
||||
whitelist_library = []
|
||||
logger(f"Whitelist Library: {whitelist_library}", 1)
|
||||
|
||||
if blacklist_library_type:
|
||||
if len(blacklist_library_type) > 0:
|
||||
blacklist_library_type = blacklist_library_type.split(",")
|
||||
blacklist_library_type = [x.lower().strip() for x in blacklist_library_type]
|
||||
else:
|
||||
blacklist_library_type = []
|
||||
logger(f"Blacklist Library Type: {blacklist_library_type}", 1)
|
||||
|
||||
if whitelist_library_type:
|
||||
if len(whitelist_library_type) > 0:
|
||||
whitelist_library_type = whitelist_library_type.split(",")
|
||||
whitelist_library_type = [x.lower().strip() for x in whitelist_library_type]
|
||||
else:
|
||||
whitelist_library_type = []
|
||||
logger(f"Whitelist Library Type: {whitelist_library_type}", 1)
|
||||
|
||||
if blacklist_users:
|
||||
if len(blacklist_users) > 0:
|
||||
blacklist_users = blacklist_users.split(",")
|
||||
blacklist_users = [x.lower().strip() for x in blacklist_users]
|
||||
if user_mapping:
|
||||
temp_users = []
|
||||
for user in blacklist_users:
|
||||
user_other = search_mapping(user_mapping, user)
|
||||
if user_other:
|
||||
temp_users.append(user_other)
|
||||
|
||||
blacklist_users = blacklist_users + temp_users
|
||||
else:
|
||||
blacklist_users = []
|
||||
logger(f"Blacklist Users: {blacklist_users}", 1)
|
||||
|
||||
if whitelist_users:
|
||||
if len(whitelist_users) > 0:
|
||||
whitelist_users = whitelist_users.split(",")
|
||||
whitelist_users = [x.lower().strip() for x in whitelist_users]
|
||||
if user_mapping:
|
||||
temp_users = []
|
||||
for user in whitelist_users:
|
||||
user_other = search_mapping(user_mapping, user)
|
||||
if user_other:
|
||||
temp_users.append(user_other)
|
||||
|
||||
whitelist_users = whitelist_users + temp_users
|
||||
else:
|
||||
whitelist_users = []
|
||||
else:
|
||||
whitelist_users = []
|
||||
logger(f"Whitelist Users: {whitelist_users}", 1)
|
||||
|
||||
return (
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
blacklist_users,
|
||||
whitelist_users,
)
|
||||
sync_from_jelly_to_plex = str_to_bool(
|
||||
os.getenv("SYNC_FROM_JELLYFIN_TO_PLEX", "True")
|
||||
)
|
||||
sync_from_jelly_to_jellyfin = str_to_bool(
|
||||
os.getenv("SYNC_FROM_JELLYFIN_TO_JELLYFIN", "True")
|
||||
)
|
||||
sync_from_jelly_to_emby = str_to_bool(
|
||||
os.getenv("SYNC_FROM_JELLYFIN_TO_EMBY", "True")
|
||||
)
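# Note: str_to_bool is also imported from src.functions and is not shown in this
# diff. A hypothetical sketch of the parsing these SYNC_FROM_* reads assume
# (defaulting to "True" when the variable is unset) might be:
def str_to_bool(value: str) -> bool:
    # Treat common truthy strings as True, everything else as False
    return str(value).strip().lower() in ("true", "1", "yes", "on")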
|
||||
|
||||
|
||||
def setup_users(
|
||||
server_1, server_2, blacklist_users, whitelist_users, user_mapping=None
|
||||
):
|
||||
|
||||
# generate list of users from server 1 and server 2
|
||||
server_1_type = server_1[0]
|
||||
server_1_connection = server_1[1]
|
||||
server_2_type = server_2[0]
|
||||
server_2_connection = server_2[1]
|
||||
print(f"Server 1: {server_1_type} {server_1_connection}")
|
||||
print(f"Server 2: {server_2_type} {server_2_connection}")
|
||||
|
||||
server_1_users = []
|
||||
if server_1_type == "plex":
|
||||
server_1_users = [x.title.lower() for x in server_1_connection.users]
|
||||
elif server_1_type == "jellyfin":
|
||||
server_1_users = [key.lower() for key in server_1_connection.users.keys()]
|
||||
|
||||
server_2_users = []
|
||||
if server_2_type == "plex":
|
||||
server_2_users = [x.title.lower() for x in server_2_connection.users]
|
||||
elif server_2_type == "jellyfin":
|
||||
server_2_users = [key.lower() for key in server_2_connection.users.keys()]
|
||||
|
||||
# combined list of overlapping users from plex and jellyfin
|
||||
users = {}
|
||||
|
||||
for server_1_user in server_1_users:
|
||||
if user_mapping:
|
||||
jellyfin_plex_mapped_user = search_mapping(user_mapping, server_1_user)
|
||||
if jellyfin_plex_mapped_user:
|
||||
users[server_1_user] = jellyfin_plex_mapped_user
|
||||
continue
|
||||
|
||||
if server_1_user in server_2_users:
|
||||
users[server_1_user] = server_1_user
|
||||
|
||||
for server_2_user in server_2_users:
|
||||
if user_mapping:
|
||||
plex_jellyfin_mapped_user = search_mapping(user_mapping, server_2_user)
|
||||
if plex_jellyfin_mapped_user:
|
||||
users[plex_jellyfin_mapped_user] = server_2_user
|
||||
continue
|
||||
|
||||
if server_2_user in server_1_users:
|
||||
users[server_2_user] = server_2_user
|
||||
|
||||
logger(f"User list that exist on both servers {users}", 1)
|
||||
|
||||
users_filtered = {}
|
||||
for user in users:
|
||||
# Skip the user if the whitelist is non-empty and neither the user nor its mapped name is in it
|
||||
if len(whitelist_users) > 0:
|
||||
if user not in whitelist_users and users[user] not in whitelist_users:
|
||||
logger(f"{user} or {users[user]} is not in whitelist", 1)
|
||||
continue
|
||||
|
||||
if user not in blacklist_users and users[user] not in blacklist_users:
|
||||
users_filtered[user] = users[user]
|
||||
|
||||
logger(f"Filtered user list {users_filtered}", 1)
|
||||
sync_from_emby_to_plex = str_to_bool(os.getenv("SYNC_FROM_EMBY_TO_PLEX", "True"))
|
||||
sync_from_emby_to_jellyfin = str_to_bool(
|
||||
os.getenv("SYNC_FROM_EMBY_TO_JELLYFIN", "True")
|
||||
)
|
||||
sync_from_emby_to_emby = str_to_bool(os.getenv("SYNC_FROM_EMBY_TO_EMBY", "True"))
|
||||
|
||||
if server_1_type == "plex":
|
||||
output_server_1_users = []
|
||||
for plex_user in server_1_connection.users:
|
||||
if (
|
||||
plex_user.title.lower() in users_filtered.keys()
|
||||
or plex_user.title.lower() in users_filtered.values()
|
||||
):
|
||||
output_server_1_users.append(plex_user)
|
||||
elif server_1_type == "jellyfin":
|
||||
output_server_1_users = {}
|
||||
for jellyfin_user, jellyfin_id in server_1_connection.users.items():
|
||||
if (
|
||||
jellyfin_user.lower() in users_filtered.keys()
|
||||
or jellyfin_user.lower() in users_filtered.values()
|
||||
):
|
||||
output_server_1_users[jellyfin_user] = jellyfin_id
|
||||
if server_2_type == "jellyfin" and not sync_from_plex_to_jellyfin:
|
||||
logger("Sync from plex -> jellyfin is disabled", 1)
|
||||
return False
|
||||
|
||||
if server_2_type == "plex":
|
||||
output_server_2_users = []
|
||||
for plex_user in server_2_connection.users:
|
||||
if (
|
||||
plex_user.title.lower() in users_filtered.keys()
|
||||
or plex_user.title.lower() in users_filtered.values()
|
||||
):
|
||||
output_server_2_users.append(plex_user)
|
||||
elif server_2_type == "jellyfin":
|
||||
output_server_2_users = {}
|
||||
for jellyfin_user, jellyfin_id in server_2_connection.users.items():
|
||||
if (
|
||||
jellyfin_user.lower() in users_filtered.keys()
|
||||
or jellyfin_user.lower() in users_filtered.values()
|
||||
):
|
||||
output_server_2_users[jellyfin_user] = jellyfin_id
|
||||
if server_2_type == "emby" and not sync_from_plex_to_emby:
|
||||
logger("Sync from plex -> emby is disabled", 1)
|
||||
return False
|
||||
|
||||
if len(output_server_1_users) == 0:
|
||||
raise Exception(
|
||||
f"No users found for server 1, users found {users} filtered users {users_filtered}"
|
||||
)
|
||||
if server_2_type == "plex" and not sync_from_plex_to_plex:
|
||||
logger("Sync from plex -> plex is disabled", 1)
|
||||
return False
|
||||
|
||||
if len(output_server_2_users) == 0:
|
||||
raise Exception(
|
||||
f"No users found for server 2, users found {users} filtered users {users_filtered}"
|
||||
)
|
||||
if server_1_type == "jellyfin":
|
||||
if server_2_type == "plex" and not sync_from_jelly_to_plex:
|
||||
logger("Sync from jellyfin -> plex is disabled", 1)
|
||||
return False
|
||||
|
||||
logger(f"Server 1 users: {output_server_1_users}", 1)
|
||||
logger(f"Server 2 users: {output_server_2_users}", 1)
|
||||
if server_2_type == "jellyfin" and not sync_from_jelly_to_jellyfin:
|
||||
logger("Sync from jellyfin -> jellyfin is disabled", 1)
|
||||
return False
|
||||
|
||||
return output_server_1_users, output_server_2_users
|
||||
if server_2_type == "emby" and not sync_from_jelly_to_emby:
|
||||
logger("Sync from jellyfin -> emby is disabled", 1)
|
||||
return False
|
||||
|
||||
if server_1_type == "emby":
|
||||
if server_2_type == "plex" and not sync_from_emby_to_plex:
|
||||
logger("Sync from emby -> plex is disabled", 1)
|
||||
return False
|
||||
|
||||
def generate_server_connections():
|
||||
servers = []
|
||||
if server_2_type == "jellyfin" and not sync_from_emby_to_jellyfin:
|
||||
logger("Sync from emby -> jellyfin is disabled", 1)
|
||||
return False
|
||||
|
||||
plex_baseurl = os.getenv("PLEX_BASEURL", None)
|
||||
plex_token = os.getenv("PLEX_TOKEN", None)
|
||||
plex_username = os.getenv("PLEX_USERNAME", None)
|
||||
plex_password = os.getenv("PLEX_PASSWORD", None)
|
||||
plex_servername = os.getenv("PLEX_SERVERNAME", None)
|
||||
ssl_bypass = str_to_bool(os.getenv("SSL_BYPASS", "False"))
|
||||
if server_2_type == "emby" and not sync_from_emby_to_emby:
|
||||
logger("Sync from emby -> emby is disabled", 1)
|
||||
return False
|
||||
|
||||
if plex_baseurl and plex_token:
|
||||
plex_baseurl = plex_baseurl.split(",")
|
||||
plex_token = plex_token.split(",")
|
||||
|
||||
if len(plex_baseurl) != len(plex_token):
|
||||
raise Exception(
|
||||
"PLEX_BASEURL and PLEX_TOKEN must have the same number of entries"
|
||||
)
|
||||
|
||||
for i, url in enumerate(plex_baseurl):
|
||||
servers.append(
|
||||
(
|
||||
"plex",
|
||||
Plex(
|
||||
baseurl=url.strip(),
|
||||
token=plex_token[i].strip(),
|
||||
username=None,
|
||||
password=None,
|
||||
servername=None,
|
||||
ssl_bypass=ssl_bypass,
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
if plex_username and plex_password and plex_servername:
|
||||
plex_username = plex_username.split(",")
|
||||
plex_password = plex_password.split(",")
|
||||
plex_servername = plex_servername.split(",")
|
||||
|
||||
if len(plex_username) != len(plex_password) or len(plex_username) != len(
|
||||
plex_servername
|
||||
):
|
||||
raise Exception(
|
||||
"PLEX_USERNAME, PLEX_PASSWORD and PLEX_SERVERNAME must have the same number of entries"
|
||||
)
|
||||
|
||||
for i, username in enumerate(plex_username):
|
||||
servers.append(
|
||||
(
|
||||
"plex",
|
||||
Plex(
|
||||
baseurl=None,
|
||||
token=None,
|
||||
username=username.strip(),
|
||||
password=plex_password[i].strip(),
|
||||
servername=plex_servername[i].strip(),
|
||||
ssl_bypass=ssl_bypass,
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
jellyfin_baseurl = os.getenv("JELLYFIN_BASEURL", None)
|
||||
jellyfin_token = os.getenv("JELLYFIN_TOKEN", None)
|
||||
|
||||
if jellyfin_baseurl and jellyfin_token:
|
||||
jellyfin_baseurl = jellyfin_baseurl.split(",")
|
||||
jellyfin_token = jellyfin_token.split(",")
|
||||
|
||||
if len(jellyfin_baseurl) != len(jellyfin_token):
|
||||
raise Exception(
|
||||
"JELLYFIN_BASEURL and JELLYFIN_TOKEN must have the same number of entries"
|
||||
)
|
||||
|
||||
for i, baseurl in enumerate(jellyfin_baseurl):
|
||||
servers.append(
|
||||
(
|
||||
"jellyfin",
|
||||
Jellyfin(baseurl=baseurl.strip(), token=jellyfin_token[i].strip()),
|
||||
)
|
||||
)
|
||||
|
||||
return servers
|
||||
return True
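# Example environment configuration (hypothetical values) for the
# generate_server_connections function above: each variable is a comma-separated
# list, and paired variables (e.g. PLEX_BASEURL / PLEX_TOKEN) must contain the
# same number of entries because they are matched up by index.
#
#   PLEX_BASEURL = "http://plex1:32400,http://plex2:32400"
#   PLEX_TOKEN = "token-for-plex1,token-for-plex2"
#   JELLYFIN_BASEURL = "http://jellyfin:8096"
#   JELLYFIN_TOKEN = "jellyfin-api-key"
#   SSL_BYPASS = "False"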
|
||||
|
||||
|
||||
def main_loop():
|
||||
logfile = os.getenv("LOGFILE", "log.log")
|
||||
# Delete logfile if it exists
|
||||
if os.path.exists(logfile):
|
||||
os.remove(logfile)
|
||||
log_file = os.getenv("LOG_FILE", os.getenv("LOGFILE", "log.log"))
|
||||
# Delete log_file if it exists
|
||||
if os.path.exists(log_file):
|
||||
os.remove(log_file)
|
||||
|
||||
dryrun = str_to_bool(os.getenv("DRYRUN", "False"))
|
||||
logger(f"Dryrun: {dryrun}", 1)
|
||||
@@ -567,9 +139,14 @@ def main_loop():
|
||||
|
||||
# Start server_2 at the next server in the list
|
||||
for server_2 in servers[servers.index(server_1) + 1 :]:
|
||||
# Check if server 1 and server 2 are going to be synced in either direction, skip if not
|
||||
if not should_sync_server(
|
||||
server_1[0], server_2[0]
|
||||
) and not should_sync_server(server_2[0], server_1[0]):
|
||||
continue
|
||||
|
||||
server_1_connection = server_1[1]
|
||||
server_2_connection = server_2[1]
|
||||
logger(f"Server 1: {server_1[0].capitalize()}: {server_1[1].info()}", 0)
|
||||
logger(f"Server 2: {server_2[0].capitalize()}: {server_2[1].info()}", 0)
|
||||
|
||||
# Create users list
|
||||
logger("Creating users list", 1)
|
||||
@@ -577,27 +154,27 @@ def main_loop():
|
||||
server_1, server_2, blacklist_users, whitelist_users, user_mapping
|
||||
)
|
||||
|
||||
logger("Creating watched lists", 1)
|
||||
server_1_watched = server_1_connection.get_watched(
|
||||
server_1_users,
|
||||
server_1_libraries, server_2_libraries = setup_libraries(
|
||||
server_1[1],
|
||||
server_2[1],
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
)
|
||||
|
||||
logger("Creating watched lists", 1)
|
||||
server_1_watched = server_1[1].get_watched(
|
||||
server_1_users, server_1_libraries
|
||||
)
|
||||
logger("Finished creating watched list server 1", 1)
|
||||
server_2_watched = asyncio.run(
|
||||
server_2_connection.get_watched(
|
||||
server_2_users,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
)
|
||||
|
||||
server_2_watched = server_2[1].get_watched(
|
||||
server_2_users, server_2_libraries
|
||||
)
|
||||
logger("Finished creating watched list server 2", 1)
|
||||
|
||||
logger(f"Server 1 watched: {server_1_watched}", 3)
|
||||
logger(f"Server 2 watched: {server_2_watched}", 3)
|
||||
|
||||
@@ -620,17 +197,27 @@ def main_loop():
|
||||
1,
|
||||
)
|
||||
|
||||
server_1_connection.update_watched(
|
||||
server_2_watched_filtered, user_mapping, library_mapping, dryrun
|
||||
)
|
||||
asyncio.run(
|
||||
server_2_connection.update_watched(
|
||||
server_1_watched_filtered, user_mapping, library_mapping, dryrun
|
||||
if should_sync_server(server_2[0], server_1[0]):
|
||||
logger(f"Syncing {server_2[1].info()} -> {server_1[1].info()}", 0)
|
||||
server_1[1].update_watched(
|
||||
server_2_watched_filtered,
|
||||
user_mapping,
|
||||
library_mapping,
|
||||
dryrun,
|
||||
)
|
||||
|
||||
if should_sync_server(server_1[0], server_2[0]):
|
||||
logger(f"Syncing {server_1[1].info()} -> {server_2[1].info()}", 0)
|
||||
server_2[1].update_watched(
|
||||
server_1_watched_filtered,
|
||||
user_mapping,
|
||||
library_mapping,
|
||||
dryrun,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
run_only_once = str_to_bool(os.getenv("RUN_ONLY_ONCE", "False"))
|
||||
sleep_duration = float(os.getenv("SLEEP_DURATION", "3600"))
|
||||
times = []
|
||||
while True:
|
||||
@@ -643,6 +230,9 @@ def main():
|
||||
if len(times) > 0:
|
||||
logger(f"Average time: {sum(times) / len(times)}", 0)
|
||||
|
||||
if run_only_once:
|
||||
break
|
||||
|
||||
logger(f"Looping in {sleep_duration}")
|
||||
sleep(sleep_duration)
|
||||
|
||||
@@ -654,9 +244,15 @@ def main():
|
||||
logger(error, log_type=2)
|
||||
|
||||
logger(traceback.format_exc(), 2)
|
||||
|
||||
if run_only_once:
|
||||
break
|
||||
|
||||
logger(f"Retrying in {sleep_duration}", log_type=0)
|
||||
sleep(sleep_duration)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
if len(times) > 0:
|
||||
logger(f"Average time: {sum(times) / len(times)}", 0)
|
||||
logger("Exiting", log_type=0)
|
||||
os._exit(0)
|
||||
|
||||
615
src/plex.py
@@ -1,115 +1,289 @@
|
||||
import re, requests
|
||||
from urllib3.poolmanager import PoolManager
|
||||
import os, requests, traceback
|
||||
from dotenv import load_dotenv
|
||||
from typing import Dict, Union, FrozenSet
|
||||
|
||||
from urllib3.poolmanager import PoolManager
|
||||
from math import floor
|
||||
|
||||
from requests.adapters import HTTPAdapter as RequestsHTTPAdapter
|
||||
|
||||
from plexapi.video import Show, Episode, Movie
|
||||
from plexapi.server import PlexServer
|
||||
from plexapi.myplex import MyPlexAccount
|
||||
|
||||
from src.functions import (
|
||||
logger,
|
||||
search_mapping,
|
||||
check_skip_logic,
|
||||
generate_library_guids_dict,
|
||||
future_thread_executor,
|
||||
contains_nested,
|
||||
log_marked,
|
||||
str_to_bool,
|
||||
)
|
||||
from src.library import generate_library_guids_dict
|
||||
|
||||
|
||||
load_dotenv(override=True)
|
||||
|
||||
generate_guids = str_to_bool(os.getenv("GENERATE_GUIDS", "True"))
|
||||
generate_locations = str_to_bool(os.getenv("GENERATE_LOCATIONS", "True"))
|
||||
|
||||
|
||||
# Bypass hostname validation for ssl. Taken from https://github.com/pkkid/python-plexapi/issues/143#issuecomment-775485186
|
||||
class HostNameIgnoringAdapter(requests.adapters.HTTPAdapter):
|
||||
class HostNameIgnoringAdapter(RequestsHTTPAdapter):
|
||||
def init_poolmanager(self, connections, maxsize, block=..., **pool_kwargs):
|
||||
self.poolmanager = PoolManager(num_pools=connections,
|
||||
maxsize=maxsize,
|
||||
block=block,
|
||||
assert_hostname=False,
|
||||
**pool_kwargs)
|
||||
self.poolmanager = PoolManager(
|
||||
num_pools=connections,
|
||||
maxsize=maxsize,
|
||||
block=block,
|
||||
assert_hostname=False,
|
||||
**pool_kwargs,
|
||||
)
|
||||
|
||||
def get_user_watched(user, user_plex, library):
|
||||
|
||||
def extract_guids_from_item(item: Union[Movie, Show, Episode]) -> Dict[str, str]:
|
||||
# If GENERATE_GUIDS is set to False, then return an empty dict
|
||||
if not generate_guids:
|
||||
return {}
|
||||
|
||||
guids: Dict[str, str] = dict(
|
||||
guid.id.split("://")
|
||||
for guid in item.guids
|
||||
if guid.id is not None and len(guid.id.strip()) > 0
|
||||
)
|
||||
|
||||
if len(guids) == 0:
|
||||
logger(
|
||||
f"Plex: Failed to get any guids for {item.title}",
|
||||
1,
|
||||
)
|
||||
|
||||
return guids
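# Illustrative only: plexapi exposes guid ids as strings such as
# "imdb://tt0111161" or "tmdb://278", so the split("://") above turns them into
# a {source: id} mapping (the ids below are examples, not real data).
example_guid_ids = ["imdb://tt0111161", "tmdb://278", "tvdb://290434"]
example_guids = dict(g.split("://") for g in example_guid_ids)
# example_guids == {"imdb": "tt0111161", "tmdb": "278", "tvdb": "290434"}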
|
||||
|
||||
|
||||
def get_guids(item: Union[Movie, Episode], completed=True):
|
||||
if not item.locations:
|
||||
logger(
|
||||
f"Plex: {item.title} has no locations",
|
||||
1,
|
||||
)
|
||||
|
||||
if not item.guids:
|
||||
logger(
|
||||
f"Plex: {item.title} has no guids",
|
||||
1,
|
||||
)
|
||||
|
||||
return {
|
||||
"title": item.title,
|
||||
"locations": (
|
||||
tuple([location.split("/")[-1] for location in item.locations])
|
||||
if generate_locations
|
||||
else tuple()
|
||||
),
|
||||
"status": {
|
||||
"completed": completed,
|
||||
"time": item.viewOffset,
|
||||
},
|
||||
} | extract_guids_from_item(
|
||||
item
|
||||
) # Merge the metadata and guid dictionaries
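# Illustrative shape of the dictionary get_guids returns for a fully watched
# movie; every value below is made up.
example_movie_entry = {
    "title": "Example Movie",
    "locations": ("Example.Movie.2020.mkv",),
    "status": {"completed": True, "time": 0},
    "imdb": "tt0000000",
    "tmdb": "12345",
}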
|
||||
|
||||
|
||||
def get_user_library_watched_show(show, process_episodes, threads=None):
|
||||
try:
|
||||
user_name = user.title.lower()
|
||||
user_watched = {}
|
||||
user_watched[user_name] = {}
|
||||
show_guids: FrozenSet = frozenset(
|
||||
(
|
||||
{
|
||||
"title": show.title,
|
||||
"locations": (
|
||||
tuple([location.split("/")[-1] for location in show.locations])
|
||||
if generate_locations
|
||||
else tuple()
|
||||
),
|
||||
}
|
||||
| extract_guids_from_item(show)
|
||||
).items() # Merge the metadata and guid dictionaries
|
||||
)
|
||||
|
||||
episode_guids_args = []
|
||||
|
||||
for episode in process_episodes:
|
||||
episode_guids_args.append([get_guids, episode, episode.isWatched])
|
||||
|
||||
episode_guids_results = future_thread_executor(
|
||||
episode_guids_args, threads=threads
|
||||
)
|
||||
|
||||
episode_guids = []
|
||||
for index, episode in enumerate(process_episodes):
|
||||
episode_guids.append(episode_guids_results[index])
|
||||
|
||||
return show_guids, episode_guids
|
||||
except Exception:
|
||||
return {}, {}
|
||||
|
||||
|
||||
def get_user_library_watched(user, user_plex, library):
|
||||
user_name: str = user.username.lower() if user.username else user.title.lower()
|
||||
try:
|
||||
logger(
|
||||
f"Plex: Generating watched for {user_name} in library {library.title}",
|
||||
0,
|
||||
)
|
||||
|
||||
library_videos = user_plex.library.section(library.title)
|
||||
|
||||
if library.type == "movie":
|
||||
user_watched[user_name][library.title] = []
|
||||
watched = []
|
||||
|
||||
library_videos = user_plex.library.section(library.title)
|
||||
for video in library_videos.search(unwatched=False):
|
||||
movie_guids = {}
|
||||
for guid in video.guids:
|
||||
guid_source = re.search(r"(.*)://", guid.id).group(1).lower()
|
||||
guid_id = re.search(r"://(.*)", guid.id).group(1)
|
||||
movie_guids[guid_source] = guid_id
|
||||
|
||||
movie_guids["title"] = video.title
|
||||
movie_guids["locations"] = tuple(
|
||||
[x.split("/")[-1] for x in video.locations]
|
||||
)
|
||||
|
||||
user_watched[user_name][library.title].append(movie_guids)
|
||||
args = [
|
||||
[get_guids, video, video.isWatched]
|
||||
for video in library_videos.search(unwatched=False)
|
||||
+ library_videos.search(inProgress=True)
|
||||
if video.isWatched or video.viewOffset >= 60000
|
||||
]
|
||||
|
||||
for guid in future_thread_executor(args, threads=len(args)):
|
||||
logger(f"Plex: Adding {guid['title']} to {user_name} watched list", 3)
|
||||
watched.append(guid)
|
||||
elif library.type == "show":
|
||||
user_watched[user_name][library.title] = {}
|
||||
watched = {}
|
||||
|
||||
library_videos = user_plex.library.section(library.title)
|
||||
for show in library_videos.search(unwatched=False):
|
||||
show_guids = {}
|
||||
for show_guid in show.guids:
|
||||
# Extract after :// from guid.id
|
||||
show_guid_source = (
|
||||
re.search(r"(.*)://", show_guid.id).group(1).lower()
|
||||
# Get all watched shows and partially watched shows
|
||||
parallel_show_task = []
|
||||
parallel_episodes_task = []
|
||||
|
||||
for show in library_videos.search(unwatched=False) + library_videos.search(
|
||||
inProgress=True
|
||||
):
|
||||
process_episodes = []
|
||||
for episode in show.episodes():
|
||||
if episode.isWatched or episode.viewOffset >= 60000:
|
||||
process_episodes.append(episode)
|
||||
|
||||
# Shows with 24 or more episodes have their episodes processed in parallel
# Shows with fewer than 24 episodes have their episodes processed serially, but the shows themselves are processed in parallel
|
||||
if len(process_episodes) >= 24:
|
||||
parallel_episodes_task.append(
|
||||
[
|
||||
get_user_library_watched_show,
|
||||
show,
|
||||
process_episodes,
|
||||
len(process_episodes),
|
||||
]
|
||||
)
|
||||
else:
|
||||
parallel_show_task.append(
|
||||
[get_user_library_watched_show, show, process_episodes, 1]
|
||||
)
|
||||
show_guid_id = re.search(r"://(.*)", show_guid.id).group(1)
|
||||
show_guids[show_guid_source] = show_guid_id
|
||||
|
||||
show_guids["title"] = show.title
|
||||
show_guids["locations"] = tuple(
|
||||
[x.split("/")[-1] for x in show.locations]
|
||||
)
|
||||
show_guids = frozenset(show_guids.items())
|
||||
for show_guids, episode_guids in future_thread_executor(
|
||||
parallel_show_task, threads=len(parallel_show_task)
|
||||
) + future_thread_executor(parallel_episodes_task, threads=1):
|
||||
if show_guids and episode_guids:
|
||||
watched[show_guids] = episode_guids
|
||||
logger(
|
||||
f"Plex: Added {episode_guids} to {user_name} watched list",
|
||||
3,
|
||||
)
|
||||
|
||||
for season in show.seasons():
|
||||
episode_guids = []
|
||||
for episode in season.episodes():
|
||||
if episode.viewCount > 0:
|
||||
episode_guids_temp = {}
|
||||
for guid in episode.guids:
|
||||
# Extract after :// from guid.id
|
||||
guid_source = (
|
||||
re.search(r"(.*)://", guid.id).group(1).lower()
|
||||
)
|
||||
guid_id = re.search(r"://(.*)", guid.id).group(1)
|
||||
episode_guids_temp[guid_source] = guid_id
|
||||
else:
|
||||
watched = None
|
||||
|
||||
episode_guids_temp["locations"] = tuple(
|
||||
[x.split("/")[-1] for x in episode.locations]
|
||||
)
|
||||
episode_guids.append(episode_guids_temp)
|
||||
logger(f"Plex: Got watched for {user_name} in library {library.title}", 1)
|
||||
logger(f"Plex: {watched}", 3)
|
||||
|
||||
if episode_guids:
|
||||
# append show, season, episode
|
||||
if show_guids not in user_watched[user_name][library.title]:
|
||||
user_watched[user_name][library.title][show_guids] = {}
|
||||
if (
|
||||
season.title
|
||||
not in user_watched[user_name][library.title][show_guids]
|
||||
):
|
||||
user_watched[user_name][library.title][show_guids][
|
||||
season.title
|
||||
] = {}
|
||||
user_watched[user_name][library.title][show_guids][
|
||||
season.title
|
||||
] = episode_guids
|
||||
|
||||
return user_watched
|
||||
return {user_name: {library.title: watched} if watched is not None else {}}
|
||||
except Exception as e:
|
||||
logger(
|
||||
f"Plex: Failed to get watched for {user_name} in library {library.title}, Error: {e}",
|
||||
2,
|
||||
)
|
||||
raise Exception(e)
|
||||
return {}
|
||||
|
||||
|
||||
def find_video(plex_search, video_ids, videos=None):
|
||||
try:
|
||||
if not generate_guids and not generate_locations:
|
||||
return False, []
|
||||
|
||||
if generate_locations:
|
||||
for location in plex_search.locations:
|
||||
if (
|
||||
contains_nested(location.split("/")[-1], video_ids["locations"])
|
||||
is not None
|
||||
):
|
||||
episode_videos = []
|
||||
if videos:
|
||||
for show, episodes in videos.items():
|
||||
show = {k: v for k, v in show}
|
||||
if (
|
||||
contains_nested(
|
||||
location.split("/")[-1], show["locations"]
|
||||
)
|
||||
is not None
|
||||
):
|
||||
for episode in episodes:
|
||||
episode_videos.append(episode)
|
||||
|
||||
return True, episode_videos
|
||||
|
||||
if generate_guids:
|
||||
for guid in plex_search.guids:
|
||||
guid_source, guid_id = guid.id.split("://")
|
||||
|
||||
# If show provider source and show provider id are in videos_shows_ids exactly, then the show is in the list
|
||||
if guid_source in video_ids.keys():
|
||||
if guid_id in video_ids[guid_source]:
|
||||
episode_videos = []
|
||||
if videos:
|
||||
for show, episodes in videos.items():
|
||||
show = {k: v for k, v in show}
|
||||
if guid_source in show.keys():
|
||||
if guid_id == show[guid_source]:
|
||||
for episode in episodes:
|
||||
episode_videos.append(episode)
|
||||
|
||||
return True, episode_videos
|
||||
|
||||
return False, []
|
||||
except Exception:
|
||||
return False, []
|
||||
|
||||
|
||||
def get_video_status(plex_search, video_ids, videos):
|
||||
try:
|
||||
if not generate_guids and not generate_locations:
|
||||
return None
|
||||
|
||||
if generate_locations:
|
||||
for location in plex_search.locations:
|
||||
if (
|
||||
contains_nested(location.split("/")[-1], video_ids["locations"])
|
||||
is not None
|
||||
):
|
||||
for video in videos:
|
||||
if (
|
||||
contains_nested(location.split("/")[-1], video["locations"])
|
||||
is not None
|
||||
):
|
||||
return video["status"]
|
||||
|
||||
if generate_guids:
|
||||
for guid in plex_search.guids:
|
||||
guid_source, guid_id = guid.id.split("://")
|
||||
|
||||
# If show provider source and show provider id are in videos_shows_ids exactly, then the show is in the list
|
||||
if guid_source in video_ids.keys():
|
||||
if guid_id in video_ids[guid_source]:
|
||||
for video in videos:
|
||||
if guid_source in video.keys():
|
||||
if guid_id == video[guid_source]:
|
||||
return video["status"]
|
||||
|
||||
return None
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def update_user_watched(user, user_plex, library, videos, dryrun):
|
||||
@@ -128,32 +302,43 @@ def update_user_watched(user, user_plex, library, videos, dryrun):
|
||||
library_videos = user_plex.library.section(library)
|
||||
if videos_movies_ids:
|
||||
for movies_search in library_videos.search(unwatched=True):
|
||||
movie_found = False
|
||||
for movie_location in movies_search.locations:
|
||||
if movie_location.split("/")[-1] in videos_movies_ids["locations"]:
|
||||
movie_found = True
|
||||
break
|
||||
video_status = get_video_status(
|
||||
movies_search, videos_movies_ids, videos
|
||||
)
|
||||
if video_status:
|
||||
if video_status["completed"]:
|
||||
msg = f"Plex: {movies_search.title} as watched for {user.title} in {library}"
|
||||
if not dryrun:
|
||||
logger(msg, 5)
|
||||
movies_search.markWatched()
|
||||
else:
|
||||
logger(msg, 6)
|
||||
|
||||
if not movie_found:
|
||||
for movie_guid in movies_search.guids:
|
||||
movie_guid_source = (
|
||||
re.search(r"(.*)://", movie_guid.id).group(1).lower()
|
||||
log_marked(
|
||||
"Plex",
|
||||
user_plex.friendlyName,
|
||||
user.title,
|
||||
library,
|
||||
movies_search.title,
|
||||
None,
|
||||
None,
|
||||
)
|
||||
movie_guid_id = re.search(r"://(.*)", movie_guid.id).group(1)
|
||||
elif video_status["time"] > 60_000:
|
||||
msg = f"Plex: {movies_search.title} as partially watched for {floor(video_status['time'] / 60_000)} minutes for {user.title} in {library}"
|
||||
if not dryrun:
|
||||
logger(msg, 5)
|
||||
movies_search.updateTimeline(video_status["time"])
|
||||
else:
|
||||
logger(msg, 6)
|
||||
|
||||
# If movie provider source and movie provider id are in videos_movie_ids exactly, then the movie is in the list
|
||||
if movie_guid_source in videos_movies_ids.keys():
|
||||
if movie_guid_id in videos_movies_ids[movie_guid_source]:
|
||||
movie_found = True
|
||||
break
|
||||
|
||||
if movie_found:
|
||||
msg = f"{movies_search.title} as watched for {user.title} in {library} for Plex"
|
||||
if not dryrun:
|
||||
logger(f"Marked {msg}", 0)
|
||||
movies_search.markWatched()
|
||||
else:
|
||||
logger(f"Dryrun {msg}", 0)
|
||||
log_marked(
|
||||
"Plex",
|
||||
user_plex.friendlyName,
|
||||
user.title,
|
||||
library,
|
||||
movies_search.title,
|
||||
duration=video_status["time"],
|
||||
)
|
||||
else:
|
||||
logger(
|
||||
f"Plex: Skipping movie {movies_search.title} as it is not in mark list for {user.title}",
|
||||
@@ -162,73 +347,57 @@ def update_user_watched(user, user_plex, library, videos, dryrun):
|
||||
|
||||
if videos_shows_ids and videos_episodes_ids:
|
||||
for show_search in library_videos.search(unwatched=True):
|
||||
show_found = False
|
||||
for show_location in show_search.locations:
|
||||
if show_location.split("/")[-1] in videos_shows_ids["locations"]:
|
||||
show_found = True
|
||||
break
|
||||
|
||||
if not show_found:
|
||||
for show_guid in show_search.guids:
|
||||
show_guid_source = (
|
||||
re.search(r"(.*)://", show_guid.id).group(1).lower()
|
||||
)
|
||||
show_guid_id = re.search(r"://(.*)", show_guid.id).group(1)
|
||||
|
||||
# If show provider source and show provider id are in videos_shows_ids exactly, then the show is in the list
|
||||
if show_guid_source in videos_shows_ids.keys():
|
||||
if show_guid_id in videos_shows_ids[show_guid_source]:
|
||||
show_found = True
|
||||
break
|
||||
|
||||
show_found, episode_videos = find_video(
|
||||
show_search, videos_shows_ids, videos
|
||||
)
|
||||
if show_found:
|
||||
for episode_search in show_search.episodes():
|
||||
episode_found = False
|
||||
video_status = get_video_status(
|
||||
episode_search, videos_episodes_ids, episode_videos
|
||||
)
|
||||
if video_status:
|
||||
if video_status["completed"]:
|
||||
msg = f"Plex: {show_search.title} {episode_search.title} as watched for {user.title} in {library}"
|
||||
if not dryrun:
|
||||
logger(msg, 5)
|
||||
episode_search.markWatched()
|
||||
else:
|
||||
logger(msg, 6)
|
||||
|
||||
for episode_location in episode_search.locations:
|
||||
if (
|
||||
episode_location.split("/")[-1]
|
||||
in videos_episodes_ids["locations"]
|
||||
):
|
||||
episode_found = True
|
||||
break
|
||||
|
||||
if not episode_found:
|
||||
for episode_guid in episode_search.guids:
|
||||
episode_guid_source = (
|
||||
re.search(r"(.*)://", episode_guid.id)
|
||||
.group(1)
|
||||
.lower()
|
||||
log_marked(
|
||||
"Plex",
|
||||
user_plex.friendlyName,
|
||||
user.title,
|
||||
library,
|
||||
show_search.title,
|
||||
episode_search.title,
|
||||
)
|
||||
episode_guid_id = re.search(
|
||||
r"://(.*)", episode_guid.id
|
||||
).group(1)
|
||||
|
||||
# If episode provider source and episode provider id are in videos_episodes_ids exactly, then the episode is in the list
|
||||
if episode_guid_source in videos_episodes_ids.keys():
|
||||
if (
|
||||
episode_guid_id
|
||||
in videos_episodes_ids[episode_guid_source]
|
||||
):
|
||||
episode_found = True
|
||||
break
|
||||
|
||||
if episode_found:
|
||||
msg = f"{show_search.title} {episode_search.title} as watched for {user.title} in {library} for Plex"
|
||||
if not dryrun:
|
||||
logger(f"Marked {msg}", 0)
|
||||
episode_search.markWatched()
|
||||
else:
|
||||
logger(f"Dryrun {msg}", 0)
|
||||
msg = f"Plex: {show_search.title} {episode_search.title} as partially watched for {floor(video_status['time'] / 60_000)} minutes for {user.title} in {library}"
|
||||
if not dryrun:
|
||||
logger(msg, 5)
|
||||
episode_search.updateTimeline(video_status["time"])
|
||||
else:
|
||||
logger(msg, 6)
|
||||
|
||||
log_marked(
|
||||
"Plex",
|
||||
user_plex.friendlyName,
|
||||
user.title,
|
||||
library,
|
||||
show_search.title,
|
||||
episode_search.title,
|
||||
video_status["time"],
|
||||
)
|
||||
else:
|
||||
logger(
|
||||
f"Plex: Skipping episode {episode_search.title} as it is not in mark list for {user.title}",
|
||||
1,
|
||||
3,
|
||||
)
|
||||
else:
|
||||
logger(
|
||||
f"Plex: Skipping show {show_search.title} as it is not in mark list for {user.title}",
|
||||
1,
|
||||
3,
|
||||
)
|
||||
|
||||
if not videos_movies_ids and not videos_shows_ids and not videos_episodes_ids:
|
||||
@@ -242,7 +411,7 @@ def update_user_watched(user, user_plex, library, videos, dryrun):
|
||||
f"Plex: Failed to update watched for {user.title} in library {library}, Error: {e}",
|
||||
2,
|
||||
)
|
||||
raise Exception(e)
|
||||
logger(traceback.format_exc(), 2)
|
||||
|
||||
|
||||
# The Plex class accepts baseurl, token, username, password and servername, all defaulting to None
|
||||
@@ -255,6 +424,7 @@ class Plex:
|
||||
password=None,
|
||||
servername=None,
|
||||
ssl_bypass=False,
|
||||
session=None,
|
||||
):
|
||||
self.baseurl = baseurl
|
||||
self.token = token
|
||||
@@ -262,21 +432,20 @@ class Plex:
|
||||
self.password = password
|
||||
self.servername = servername
|
||||
self.ssl_bypass = ssl_bypass
|
||||
self.plex = self.login(self.baseurl, self.token, ssl_bypass)
|
||||
if ssl_bypass:
|
||||
# Session for ssl bypass
|
||||
session = requests.Session()
|
||||
# By pass ssl hostname check https://github.com/pkkid/python-plexapi/issues/143#issuecomment-775485186
|
||||
session.mount("https://", HostNameIgnoringAdapter())
|
||||
self.session = session
|
||||
self.plex = self.login(self.baseurl, self.token)
|
||||
self.admin_user = self.plex.myPlexAccount()
|
||||
self.users = self.get_users()
|
||||
|
||||
def login(self, baseurl, token, ssl_bypass=False):
|
||||
def login(self, baseurl, token):
|
||||
try:
|
||||
if baseurl and token:
|
||||
# Login via token
|
||||
if ssl_bypass:
|
||||
session = requests.Session()
|
||||
# By pass ssl hostname check https://github.com/pkkid/python-plexapi/issues/143#issuecomment-775485186
|
||||
session.mount("https://", HostNameIgnoringAdapter())
|
||||
plex = PlexServer(baseurl, token, session=session)
|
||||
else:
|
||||
plex = PlexServer(baseurl, token)
|
||||
plex = PlexServer(baseurl, token, session=self.session)
|
||||
elif self.username and self.password and self.servername:
|
||||
# Login via plex account
|
||||
account = MyPlexAccount(self.username, self.password)
|
||||
@@ -293,6 +462,9 @@ class Plex:
|
||||
logger(f"Plex: Failed to login, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
def info(self) -> str:
|
||||
return f"Plex {self.plex.friendlyName}: {self.plex.version}"
|
||||
|
||||
def get_users(self):
|
||||
try:
|
||||
users = self.plex.myPlexAccount().users()
|
||||
@@ -305,57 +477,58 @@ class Plex:
|
||||
logger(f"Plex: Failed to get users, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
def get_watched(
|
||||
self,
|
||||
users,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
):
|
||||
def get_libraries(self):
|
||||
try:
|
||||
output = {}
|
||||
|
||||
libraries = self.plex.library.sections()
|
||||
|
||||
for library in libraries:
|
||||
library_title = library.title
|
||||
library_type = library.type
|
||||
|
||||
output[library_title] = library_type
|
||||
|
||||
return output
|
||||
except Exception as e:
|
||||
logger(f"Plex: Failed to get libraries, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
def get_watched(self, users, sync_libraries):
|
||||
try:
|
||||
# Get all libraries
|
||||
users_watched = {}
|
||||
args = []
|
||||
|
||||
for user in users:
|
||||
if self.admin_user == user:
|
||||
user_plex = self.plex
|
||||
else:
|
||||
user_plex = self.login(
|
||||
self.plex._baseurl, user.get_token(self.plex.machineIdentifier), self.ssl_bypass
|
||||
)
|
||||
token = user.get_token(self.plex.machineIdentifier)
|
||||
if token:
|
||||
user_plex = self.login(
|
||||
self.plex._baseurl,
|
||||
token,
|
||||
)
|
||||
else:
|
||||
logger(
|
||||
f"Plex: Failed to get token for {user.title}, skipping",
|
||||
2,
|
||||
)
|
||||
users_watched[user.title] = {}
|
||||
continue
|
||||
|
||||
libraries = user_plex.library.sections()
|
||||
|
||||
for library in libraries:
|
||||
library_title = library.title
|
||||
library_type = library.type
|
||||
|
||||
skip_reason = check_skip_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
)
|
||||
|
||||
if skip_reason:
|
||||
logger(
|
||||
f"Plex: Skipping library {library_title} {skip_reason}", 1
|
||||
)
|
||||
if library.title not in sync_libraries:
|
||||
continue
|
||||
|
||||
args.append([get_user_watched, user, user_plex, library])
|
||||
user_watched = get_user_library_watched(user, user_plex, library)
|
||||
|
||||
for user_watched in future_thread_executor(args):
|
||||
for user, user_watched_temp in user_watched.items():
|
||||
if user not in users_watched:
|
||||
users_watched[user] = {}
|
||||
users_watched[user].update(user_watched_temp)
|
||||
for user_watched, user_watched_temp in user_watched.items():
|
||||
if user_watched not in users_watched:
|
||||
users_watched[user_watched] = {}
|
||||
users_watched[user_watched].update(user_watched_temp)
|
||||
|
||||
return users_watched
|
||||
except Exception as e:
|
||||
@@ -378,19 +551,42 @@ class Plex:
|
||||
user_other = search_mapping(user_mapping, user)
|
||||
|
||||
for index, value in enumerate(self.users):
|
||||
if user.lower() == value.title.lower():
|
||||
username_title = (
|
||||
value.username.lower()
|
||||
if value.username
|
||||
else value.title.lower()
|
||||
)
|
||||
|
||||
if user.lower() == username_title:
|
||||
user = self.users[index]
|
||||
break
|
||||
elif user_other and user_other.lower() == value.title.lower():
|
||||
elif user_other and user_other.lower() == username_title:
|
||||
user = self.users[index]
|
||||
break
|
||||
|
||||
if self.admin_user == user:
|
||||
user_plex = self.plex
|
||||
else:
|
||||
user_plex = PlexServer(
|
||||
self.plex._baseurl, user.get_token(self.plex.machineIdentifier)
|
||||
)
|
||||
if isinstance(user, str):
|
||||
logger(
|
||||
f"Plex: {user} is not a plex object, attempting to get object for user",
|
||||
4,
|
||||
)
|
||||
user = self.plex.myPlexAccount().user(user)
|
||||
|
||||
token = user.get_token(self.plex.machineIdentifier)
|
||||
if token:
|
||||
user_plex = PlexServer(
|
||||
self.plex._baseurl,
|
||||
token,
|
||||
session=self.session,
|
||||
)
|
||||
else:
|
||||
logger(
|
||||
f"Plex: Failed to get token for {user.title}, skipping",
|
||||
2,
|
||||
)
|
||||
continue
|
||||
|
||||
for library, videos in libraries.items():
|
||||
library_other = None
|
||||
@@ -415,12 +611,13 @@ class Plex:
|
||||
else:
|
||||
logger(
|
||||
f"Plex: Library {library} or {library_other} not found in library list",
|
||||
2,
|
||||
1,
|
||||
)
|
||||
continue
|
||||
else:
|
||||
logger(
|
||||
f"Plex: Library {library} not found in library list", 2
|
||||
f"Plex: Library {library} not found in library list",
|
||||
1,
|
||||
)
|
||||
continue
|
||||
|
||||
|
||||
133
src/users.py
Normal file
@@ -0,0 +1,133 @@
|
||||
from src.functions import (
|
||||
logger,
|
||||
search_mapping,
|
||||
)
|
||||
|
||||
|
||||
def generate_user_list(server):
|
||||
# generate list of users from the given server
|
||||
server_type = server[0]
|
||||
server_connection = server[1]
|
||||
|
||||
server_users = []
|
||||
if server_type == "plex":
|
||||
for user in server_connection.users:
|
||||
server_users.append(
|
||||
user.username.lower() if user.username else user.title.lower()
|
||||
)
|
||||
|
||||
elif server_type in ["jellyfin", "emby"]:
|
||||
server_users = [key.lower() for key in server_connection.users.keys()]
|
||||
|
||||
return server_users
|
||||
|
||||
|
||||
def combine_user_lists(server_1_users, server_2_users, user_mapping):
|
||||
# combined list of overlapping users between the two servers
|
||||
users = {}
|
||||
|
||||
for server_1_user in server_1_users:
|
||||
if user_mapping:
|
||||
mapped_user = search_mapping(user_mapping, server_1_user)
|
||||
if mapped_user in server_2_users:
|
||||
users[server_1_user] = mapped_user
|
||||
continue
|
||||
|
||||
if server_1_user in server_2_users:
|
||||
users[server_1_user] = server_1_user
|
||||
|
||||
for server_2_user in server_2_users:
|
||||
if user_mapping:
|
||||
mapped_user = search_mapping(user_mapping, server_2_user)
|
||||
if mapped_user in server_1_users:
|
||||
users[mapped_user] = server_2_user
|
||||
continue
|
||||
|
||||
if server_2_user in server_1_users:
|
||||
users[server_2_user] = server_2_user
|
||||
|
||||
return users
|
||||
|
||||
|
||||
def filter_user_lists(users, blacklist_users, whitelist_users):
|
||||
users_filtered = {}
|
||||
for user in users:
|
||||
# Skip the user if the whitelist is non-empty and neither the user nor its mapped name is in it
|
||||
if len(whitelist_users) > 0:
|
||||
if user not in whitelist_users and users[user] not in whitelist_users:
|
||||
logger(f"{user} or {users[user]} is not in whitelist", 1)
|
||||
continue
|
||||
|
||||
if user not in blacklist_users and users[user] not in blacklist_users:
|
||||
users_filtered[user] = users[user]
|
||||
|
||||
return users_filtered
|
||||
|
||||
|
||||
def generate_server_users(server, users):
|
||||
server_users = None
|
||||
|
||||
if server[0] == "plex":
|
||||
server_users = []
|
||||
for plex_user in server[1].users:
|
||||
username_title = (
|
||||
plex_user.username if plex_user.username else plex_user.title
|
||||
)
|
||||
|
||||
if (
|
||||
username_title.lower() in users.keys()
|
||||
or username_title.lower() in users.values()
|
||||
):
|
||||
server_users.append(plex_user)
|
||||
elif server[0] in ["jellyfin", "emby"]:
|
||||
server_users = {}
|
||||
for jellyfin_user, jellyfin_id in server[1].users.items():
|
||||
if (
|
||||
jellyfin_user.lower() in users.keys()
|
||||
or jellyfin_user.lower() in users.values()
|
||||
):
|
||||
server_users[jellyfin_user] = jellyfin_id
|
||||
|
||||
return server_users
|
||||
|
||||
|
||||
def setup_users(
|
||||
server_1, server_2, blacklist_users, whitelist_users, user_mapping=None
|
||||
):
|
||||
server_1_users = generate_user_list(server_1)
|
||||
server_2_users = generate_user_list(server_2)
|
||||
logger(f"Server 1 users: {server_1_users}", 1)
|
||||
logger(f"Server 2 users: {server_2_users}", 1)
|
||||
|
||||
users = combine_user_lists(server_1_users, server_2_users, user_mapping)
|
||||
logger(f"User list that exist on both servers {users}", 1)
|
||||
|
||||
users_filtered = filter_user_lists(users, blacklist_users, whitelist_users)
|
||||
logger(f"Filtered user list {users_filtered}", 1)
|
||||
|
||||
output_server_1_users = generate_server_users(server_1, users_filtered)
|
||||
output_server_2_users = generate_server_users(server_2, users_filtered)
|
||||
|
||||
# Check if users is none or empty
|
||||
if output_server_1_users is None or len(output_server_1_users) == 0:
|
||||
logger(
|
||||
f"No users found for server 1 {server_1[0]}, users: {server_1_users}, overlapping users {users}, filtered users {users_filtered}, server 1 users {server_1[1].users}"
|
||||
)
|
||||
|
||||
if output_server_2_users is None or len(output_server_2_users) == 0:
|
||||
logger(
|
||||
f"No users found for server 2 {server_2[0]}, users: {server_2_users}, overlapping users {users} filtered users {users_filtered}, server 2 users {server_2[1].users}"
|
||||
)
|
||||
|
||||
if (
|
||||
output_server_1_users is None
|
||||
or len(output_server_1_users) == 0
|
||||
or output_server_2_users is None
|
||||
or len(output_server_2_users) == 0
|
||||
):
|
||||
raise Exception("No users found for one or both servers")
|
||||
|
||||
logger(f"Server 1 users: {output_server_1_users}", 1)
|
||||
logger(f"Server 2 users: {output_server_2_users}", 1)
|
||||
|
||||
return output_server_1_users, output_server_2_users
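# Illustrative usage (hypothetical connections): each server argument is a
# ("plex" | "jellyfin" | "emby", connection) tuple as produced by
# generate_server_connections() in src/connection.py.
#
#   server_1_users, server_2_users = setup_users(
#       ("plex", plex_connection),
#       ("jellyfin", jellyfin_connection),
#       blacklist_users=[],
#       whitelist_users=[],
#       user_mapping={"alice_plex": "alice_jf"},
#   )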
|
||||
255
src/watched.py
Normal file
@@ -0,0 +1,255 @@
|
||||
import copy
|
||||
|
||||
from src.functions import logger, search_mapping, contains_nested
|
||||
|
||||
from src.library import generate_library_guids_dict
|
||||
|
||||
|
||||
def check_remove_entry(video, library, video_index, library_watched_list_2):
|
||||
if video_index is not None:
|
||||
if (
|
||||
library_watched_list_2["completed"][video_index]
|
||||
== video["status"]["completed"]
|
||||
) and (library_watched_list_2["time"][video_index] == video["status"]["time"]):
|
||||
logger(
|
||||
f"Removing {video['title']} from {library} due to exact match",
|
||||
3,
|
||||
)
|
||||
return True
|
||||
elif (
|
||||
library_watched_list_2["completed"][video_index] == True
|
||||
and video["status"]["completed"] == False
|
||||
):
|
||||
logger(
|
||||
f"Removing {video['title']} from {library} due to being complete in one library and not the other",
|
||||
3,
|
||||
)
|
||||
return True
|
||||
elif (
|
||||
library_watched_list_2["completed"][video_index] == False
|
||||
and video["status"]["completed"] == False
|
||||
) and (video["status"]["time"] < library_watched_list_2["time"][video_index]):
|
||||
logger(
|
||||
f"Removing {video['title']} from {library} due to more time watched in one library than the other",
|
||||
3,
|
||||
)
|
||||
return True
|
||||
elif (
|
||||
library_watched_list_2["completed"][video_index] == True
|
||||
and video["status"]["completed"] == True
|
||||
):
|
||||
logger(
|
||||
f"Removing {video['title']} from {library} due to being complete in both libraries",
|
||||
3,
|
||||
)
|
||||
return True
|
||||
|
||||
return False
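# Illustrative call with made-up data: the entry is removed because its watch
# state matches index 0 of the second library's lists exactly.
example_video = {"title": "Example Movie", "status": {"completed": True, "time": 0}}
example_list_2 = {"completed": [True], "time": [0]}
# check_remove_entry(example_video, "Movies", 0, example_list_2) -> True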
|
||||
|
||||
|
||||
def cleanup_watched(
|
||||
watched_list_1, watched_list_2, user_mapping=None, library_mapping=None
|
||||
):
|
||||
modified_watched_list_1 = copy.deepcopy(watched_list_1)
|
||||
|
||||
# remove entries from watched_list_1 that are in watched_list_2
|
||||
for user_1 in watched_list_1:
|
||||
user_other = None
|
||||
if user_mapping:
|
||||
user_other = search_mapping(user_mapping, user_1)
|
||||
user_2 = get_other(watched_list_2, user_1, user_other)
|
||||
if user_2 is None:
|
||||
continue
|
||||
|
||||
for library_1 in watched_list_1[user_1]:
|
||||
library_other = None
|
||||
if library_mapping:
|
||||
library_other = search_mapping(library_mapping, library_1)
|
||||
library_2 = get_other(watched_list_2[user_2], library_1, library_other)
|
||||
if library_2 is None:
|
||||
continue
|
||||
|
||||
(
|
||||
_,
|
||||
episode_watched_list_2_keys_dict,
|
||||
movies_watched_list_2_keys_dict,
|
||||
) = generate_library_guids_dict(watched_list_2[user_2][library_2])
|
||||
|
||||
# Movies
|
||||
if isinstance(watched_list_1[user_1][library_1], list):
|
||||
for movie in watched_list_1[user_1][library_1]:
|
||||
movie_index = get_movie_index_in_dict(
|
||||
movie, movies_watched_list_2_keys_dict
|
||||
)
|
||||
if movie_index is not None:
|
||||
if check_remove_entry(
|
||||
movie,
|
||||
library_1,
|
||||
movie_index,
|
||||
movies_watched_list_2_keys_dict,
|
||||
):
|
||||
modified_watched_list_1[user_1][library_1].remove(movie)
|
||||
|
||||
# TV Shows
|
||||
elif isinstance(watched_list_1[user_1][library_1], dict):
|
||||
for show_key_1 in watched_list_1[user_1][library_1].keys():
|
||||
show_key_dict = dict(show_key_1)
|
||||
|
||||
# Filter the episode_watched_list_2_keys_dict dictionary to handle cases
|
||||
# where episode location names are not unique such as S01E01.mkv
|
||||
filtered_episode_watched_list_2_keys_dict = (
|
||||
filter_episode_watched_list_2_keys_dict(
|
||||
episode_watched_list_2_keys_dict, show_key_dict
|
||||
)
|
||||
)
|
||||
for episode in watched_list_1[user_1][library_1][show_key_1]:
|
||||
episode_index = get_episode_index_in_dict(
|
||||
episode, filtered_episode_watched_list_2_keys_dict
|
||||
)
|
||||
if episode_index is not None:
|
||||
if check_remove_entry(
|
||||
episode,
|
||||
library_1,
|
||||
episode_index,
|
||||
episode_watched_list_2_keys_dict,
|
||||
):
|
||||
modified_watched_list_1[user_1][library_1][
|
||||
show_key_1
|
||||
].remove(episode)
|
||||
|
||||
# Remove empty shows
|
||||
if len(modified_watched_list_1[user_1][library_1][show_key_1]) == 0:
|
||||
if show_key_1 in modified_watched_list_1[user_1][library_1]:
|
||||
logger(
|
||||
f"Removing {show_key_dict['title']} because it is empty",
|
||||
3,
|
||||
)
|
||||
del modified_watched_list_1[user_1][library_1][show_key_1]
|
||||
|
||||
for user_1 in watched_list_1:
|
||||
for library_1 in watched_list_1[user_1]:
|
||||
if library_1 in modified_watched_list_1[user_1]:
|
||||
# If library is empty then remove it
|
||||
if len(modified_watched_list_1[user_1][library_1]) == 0:
|
||||
logger(f"Removing {library_1} from {user_1} because it is empty", 1)
|
||||
del modified_watched_list_1[user_1][library_1]
|
||||
|
||||
if user_1 in modified_watched_list_1:
|
||||
# If user is empty delete user
|
||||
if len(modified_watched_list_1[user_1]) == 0:
|
||||
logger(f"Removing {user_1} from watched list 1 because it is empty", 1)
|
||||
del modified_watched_list_1[user_1]
|
||||
|
||||
return modified_watched_list_1
|
||||
|
||||
|
||||
def get_other(watched_list, object_1, object_2):
|
||||
if object_1 in watched_list:
|
||||
return object_1
|
||||
elif object_2 in watched_list:
|
||||
return object_2
|
||||
else:
|
||||
logger(f"{object_1} and {object_2} not found in watched list 2", 1)
|
||||
return None
|
||||
|
||||
|
||||
def get_movie_index_in_dict(movie, movies_watched_list_2_keys_dict):
|
||||
# Iterate through the keys and values of the movie dictionary
|
||||
for movie_key, movie_value in movie.items():
|
||||
# If the key is "locations", check if the "locations" key is present in the movies_watched_list_2_keys_dict dictionary
|
||||
if movie_key == "locations":
|
||||
if "locations" in movies_watched_list_2_keys_dict.keys():
|
||||
# Iterate through the locations in the movie dictionary
|
||||
for location in movie_value:
|
||||
# If the location is in the movies_watched_list_2_keys_dict dictionary, return index of the key
|
||||
return contains_nested(
|
||||
location, movies_watched_list_2_keys_dict["locations"]
|
||||
)
|
||||
|
||||
# If the key is not "locations", check if the movie_key is present in the movies_watched_list_2_keys_dict dictionary
|
||||
else:
|
||||
if movie_key in movies_watched_list_2_keys_dict.keys():
|
||||
# If the movie_value is in the movies_watched_list_2_keys_dict dictionary, return its index
|
||||
if movie_value in movies_watched_list_2_keys_dict[movie_key]:
|
||||
return movies_watched_list_2_keys_dict[movie_key].index(movie_value)
|
||||
|
||||
# If the loop completes without finding a match, return None
|
||||
return None
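# Note: contains_nested is imported from src.functions and is not shown in this
# diff. Based on how it is used here and in src/plex.py (the result is treated
# as an index into the second library's lists, or None), a hypothetical sketch:
def contains_nested(lookup_value, lookup_list):
    # Return the index of the first entry that contains lookup_value.
    # Entries may be strings or tuples of file locations, or None placeholders
    # inserted by filter_episode_watched_list_2_keys_dict below.
    for index, entry in enumerate(lookup_list):
        if entry is None:
            continue
        if lookup_value in entry:
            return index
    return None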
|
||||
|
||||
|
||||
def filter_episode_watched_list_2_keys_dict(
|
||||
episode_watched_list_2_keys_dict, show_key_dict
|
||||
):
|
||||
# If the episode_watched_list_2_keys_dict dictionary is empty or missing the "show" key, return an empty dictionary
|
||||
if (
|
||||
len(episode_watched_list_2_keys_dict) == 0
|
||||
or "show" not in episode_watched_list_2_keys_dict.keys()
|
||||
):
|
||||
return {}
|
||||
|
||||
# Filter the episode_watched_list_2_keys_dict dictionary to only include values for the correct show
|
||||
filtered_episode_watched_list_2_keys_dict = {}
|
||||
show_indecies = []
|
||||
|
||||
# Iterate through episode_watched_list_2_keys_dict["show"] and find the indecies that match show_key_dict
|
||||
for show_index, show_value in enumerate(episode_watched_list_2_keys_dict["show"]):
|
||||
# Iterate through the keys and values of the show_value dictionary and check if they match show_key_dict
|
||||
for show_key, show_key_value in show_value.items():
|
||||
if show_key == "locations":
|
||||
# Iterate through the locations in the show_value dictionary
|
||||
for location in show_key_value:
|
||||
# If the location is in the episode_watched_list_2_keys_dict dictionary, return index of the key
|
||||
if (
|
||||
contains_nested(location, show_key_dict["locations"])
|
||||
is not None
|
||||
):
|
||||
show_indecies.append(show_index)
|
||||
break
|
||||
else:
|
||||
if show_key in show_key_dict.keys():
|
||||
if show_key_value == show_key_dict[show_key]:
|
||||
show_indecies.append(show_index)
|
||||
break
|
||||
|
||||
# lists
|
||||
indecies = list(set(show_indecies))
|
||||
|
||||
# If there are no indecies that match the show, return an empty dictionary
|
||||
if len(indecies) == 0:
|
||||
return {}
|
||||
|
||||
# Create a copy of the dictionary with indecies that match the show and none that don't
|
||||
for key, value in episode_watched_list_2_keys_dict.items():
|
||||
if key not in filtered_episode_watched_list_2_keys_dict:
|
||||
filtered_episode_watched_list_2_keys_dict[key] = []
|
||||
|
||||
for index, _ in enumerate(value):
|
||||
if index in indecies:
|
||||
filtered_episode_watched_list_2_keys_dict[key].append(value[index])
|
||||
else:
|
||||
filtered_episode_watched_list_2_keys_dict[key].append(None)
|
||||
|
||||
return filtered_episode_watched_list_2_keys_dict
|
||||
|
||||
|
||||
def get_episode_index_in_dict(episode, episode_watched_list_2_keys_dict):
|
||||
# Iterate through the keys and values of the episode dictionary
|
||||
for episode_key, episode_value in episode.items():
|
||||
if episode_key in episode_watched_list_2_keys_dict.keys():
|
||||
if episode_key == "locations":
|
||||
# Iterate through the locations in the episode dictionary
|
||||
for location in episode_value:
|
||||
# If the location is in the episode_watched_list_2_keys_dict dictionary, return index of the key
|
||||
return contains_nested(
|
||||
location, episode_watched_list_2_keys_dict["locations"]
|
||||
)
|
||||
|
||||
else:
|
||||
# If the episode_value is in the episode_watched_list_2_keys_dict dictionary, return True
|
||||
if episode_value in episode_watched_list_2_keys_dict[episode_key]:
|
||||
return episode_watched_list_2_keys_dict[episode_key].index(
|
||||
episode_value
|
||||
)
|
||||
|
||||
# If the loop completes without finding a match, return False
|
||||
return None
|
||||
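For orientation, here is a minimal, hypothetical sketch (not part of the diff) of how the lookup helpers above are meant to be used: given the flattened key dictionary that the library code builds for server 2, `get_movie_index_in_dict` returns the position of a matching movie or `None`. The sample values are invented for illustration, and the import path assumes these helpers live in `src/watched.py`, as this hunk suggests.

```python
# Illustrative only: sample data is invented, module path assumed from this diff.
from src.watched import get_movie_index_in_dict

# Flattened "keys" dict for server 2, one list per identifier type.
movies_watched_list_2_keys_dict = {
    "imdb": ["tt2380307"],
    "tmdb": ["354912"],
    "locations": [("Coco (2017) Remux-1080p.mkv",)],
}

movie = {"imdb": "tt2380307", "title": "Coco"}

index = get_movie_index_in_dict(movie, movies_watched_list_2_keys_dict)
if index is not None:
    print(f"Movie already present on server 2 at index {index}")
```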
test/ci_emby.env (new file, 116 lines)
@@ -0,0 +1,116 @@
# Global Settings

## Do not mark any shows/movies as played and instead just output to the log if they would have been marked.
DRYRUN = "True"

## Additional logging information
DEBUG = "True"

## Debugging level, "info" is default, "debug" is more verbose
DEBUG_LEVEL = "debug"

## If set to true, the script will only run once and then exit
RUN_ONLY_ONCE = "True"

## How often to run the script in seconds
SLEEP_DURATION = 10

## Log file where all output will be written
LOG_FILE = "log.log"

## Mark file where all shows/movies that have been marked as played will be written
MARK_FILE = "mark.log"

## Timeout for requests to Jellyfin
REQUEST_TIMEOUT = 300

## Max threads for processing
MAX_THREADS = 2

## Generate guids
## Generating guids is a slow process, so this is a way to speed up the process
## by using the location only, useful when using the same files on multiple servers
GENERATE_GUIDS = "True"

## Generate locations
## Generating locations is a slow process, so this is a way to speed up the process
## by using the guid only, useful when using different files on multiple servers
GENERATE_LOCATIONS = "True"

## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
USER_MAPPING = {"JellyUser":"jellyplex_watched"}

## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options
LIBRARY_MAPPING = { "Shows": "TV Shows" }


## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply, so if a mapping exists for the user or library then both will be excluded.
## Comma separated for multiple options
#BLACKLIST_LIBRARY = ""
#WHITELIST_LIBRARY = "Movies"
#BLACKLIST_LIBRARY_TYPE = "Series"
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
#BLACKLIST_USERS = ""
WHITELIST_USERS = "jellyplex_watched"



# Plex

## Recommended to use a token, as connecting directly to the server is faster than going through the Plex servers
## URL of the Plex server, use the hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
PLEX_BASEURL = "http://localhost:32400"

## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
## Comma separated list for multiple servers
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"

## If not using a Plex token, use the username and password of the server admin along with the server name
## Comma separated for multiple options
#PLEX_USERNAME = "PlexUser, PlexUser2"
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"

## Skip hostname validation for SSL certificates.
## Set to True if running into SSL certificate errors
SSL_BYPASS = "True"

# Jellyfin

## Jellyfin server URL, use the hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
JELLYFIN_BASEURL = "http://localhost:8096"

## Jellyfin API token, created manually by logging in to the Jellyfin server admin dashboard and creating an API key
## Comma separated list for multiple servers
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"

# Emby

## Emby server URL, use the hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
EMBY_BASEURL = "http://localhost:8097"

## Emby API token, created manually by logging in to the Emby server admin dashboard and creating an API key
## Comma separated list for multiple servers
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"


# Syncing Options

## Control the direction of syncing, e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause updates from Plex
## to be applied in Jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple Plex servers
SYNC_FROM_PLEX_TO_JELLYFIN = "False"
SYNC_FROM_PLEX_TO_PLEX = "False"
SYNC_FROM_PLEX_TO_EMBY = "False"

SYNC_FROM_JELLYFIN_TO_PLEX = "False"
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "False"
SYNC_FROM_JELLYFIN_TO_EMBY = "False"

SYNC_FROM_EMBY_TO_PLEX = "True"
SYNC_FROM_EMBY_TO_JELLYFIN = "True"
SYNC_FROM_EMBY_TO_EMBY = "True"
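Since these CI env files are plain dotenv files, a minimal sketch of reading one follows. It is illustrative only and not part of the diff; the use of python-dotenv and the `env_bool` helper are assumptions about how the settings might be consumed.

```python
# Illustrative only: load a CI env file and read a couple of settings.
# python-dotenv and the helper below are assumptions, not from the diff.
import os

from dotenv import load_dotenv

# Load the Emby CI configuration shown above.
load_dotenv("test/ci_emby.env")


def env_bool(name: str, default: str = "False") -> bool:
    # Values are stored as quoted strings such as DRYRUN = "True".
    return os.getenv(name, default).strip('"').lower() == "true"


dryrun = env_bool("DRYRUN")
sleep_duration = float(os.getenv("SLEEP_DURATION", "3600").strip('"'))
print(f"DRYRUN={dryrun}, SLEEP_DURATION={sleep_duration}")
```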
test/ci_guids.env (new file, 116 lines)
@@ -0,0 +1,116 @@
|
||||
# Global Settings
|
||||
|
||||
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
|
||||
DRYRUN = "True"
|
||||
|
||||
## Additional logging information
|
||||
DEBUG = "True"
|
||||
|
||||
## Debugging level, "info" is default, "debug" is more verbose
|
||||
DEBUG_LEVEL = "debug"
|
||||
|
||||
## If set to true then the script will only run once and then exit
|
||||
RUN_ONLY_ONCE = "True"
|
||||
|
||||
## How often to run the script in seconds
|
||||
SLEEP_DURATION = 10
|
||||
|
||||
## Log file where all output will be written to
|
||||
LOG_FILE = "log.log"
|
||||
|
||||
## Mark file where all shows/movies that have been marked as played will be written to
|
||||
MARK_FILE = "mark.log"
|
||||
|
||||
## Timeout for requests for jellyfin
|
||||
REQUEST_TIMEOUT = 300
|
||||
|
||||
## Max threads for processing
|
||||
MAX_THREADS = 2
|
||||
|
||||
## Generate guids
|
||||
## Generating guids is a slow process, so this is a way to speed up the process
|
||||
# by using the location only, useful when using same files on multiple servers
|
||||
GENERATE_GUIDS = "True"
|
||||
|
||||
## Generate locations
|
||||
## Generating locations is a slow process, so this is a way to speed up the process
|
||||
## by using the guid only, useful when using different files on multiple servers
|
||||
GENERATE_LOCATIONS = "False"
|
||||
|
||||
## Map usernames between servers in the event that they are different, order does not matter
|
||||
## Comma seperated for multiple options
|
||||
USER_MAPPING = {"JellyUser":"jellyplex_watched"}
|
||||
|
||||
## Map libraries between servers in the even that they are different, order does not matter
|
||||
## Comma seperated for multiple options
|
||||
LIBRARY_MAPPING = { "Shows": "TV Shows" }
|
||||
|
||||
|
||||
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
|
||||
## Comma seperated for multiple options
|
||||
#BLACKLIST_LIBRARY = ""
|
||||
#WHITELIST_LIBRARY = "Movies"
|
||||
#BLACKLIST_LIBRARY_TYPE = "Series"
|
||||
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
|
||||
#BLACKLIST_USERS = ""
|
||||
WHITELIST_USERS = "jellyplex_watched"
|
||||
|
||||
|
||||
|
||||
# Plex
|
||||
|
||||
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
||||
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma seperated list for multiple servers
|
||||
PLEX_BASEURL = "http://localhost:32400"
|
||||
|
||||
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
||||
## Comma seperated list for multiple servers
|
||||
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"
|
||||
|
||||
## If not using plex token then use username and password of the server admin along with the servername
|
||||
## Comma seperated for multiple options
|
||||
#PLEX_USERNAME = "PlexUser, PlexUser2"
|
||||
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
|
||||
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"
|
||||
|
||||
## Skip hostname validation for ssl certificates.
|
||||
## Set to True if running into ssl certificate errors
|
||||
SSL_BYPASS = "True"
|
||||
|
||||
# Jellyfin
|
||||
|
||||
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma seperated list for multiple servers
|
||||
JELLYFIN_BASEURL = "http://localhost:8096"
|
||||
|
||||
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
||||
## Comma seperated list for multiple servers
|
||||
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
||||
|
||||
# Emby
|
||||
|
||||
## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma seperated list for multiple servers
|
||||
EMBY_BASEURL = "http://localhost:8097"
|
||||
|
||||
## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
|
||||
## Comma seperated list for multiple servers
|
||||
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"
|
||||
|
||||
|
||||
# Syncing Options
|
||||
|
||||
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
||||
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
||||
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_PLEX_TO_PLEX = "True"
|
||||
SYNC_FROM_PLEX_TO_EMBY = "True"
|
||||
|
||||
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_EMBY = "True"
|
||||
|
||||
SYNC_FROM_EMBY_TO_PLEX = "True"
|
||||
SYNC_FROM_EMBY_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_EMBY_TO_EMBY = "True"
|
||||
test/ci_jellyfin.env (new file, 116 lines)
@@ -0,0 +1,116 @@
|
||||
# Global Settings
|
||||
|
||||
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
|
||||
DRYRUN = "True"
|
||||
|
||||
## Additional logging information
|
||||
DEBUG = "True"
|
||||
|
||||
## Debugging level, "info" is default, "debug" is more verbose
|
||||
DEBUG_LEVEL = "debug"
|
||||
|
||||
## If set to true then the script will only run once and then exit
|
||||
RUN_ONLY_ONCE = "True"
|
||||
|
||||
## How often to run the script in seconds
|
||||
SLEEP_DURATION = 10
|
||||
|
||||
## Log file where all output will be written to
|
||||
LOG_FILE = "log.log"
|
||||
|
||||
## Mark file where all shows/movies that have been marked as played will be written to
|
||||
MARK_FILE = "mark.log"
|
||||
|
||||
## Timeout for requests for jellyfin
|
||||
REQUEST_TIMEOUT = 300
|
||||
|
||||
## Max threads for processing
|
||||
MAX_THREADS = 2
|
||||
|
||||
## Generate guids
|
||||
## Generating guids is a slow process, so this is a way to speed up the process
|
||||
# by using the location only, useful when using same files on multiple servers
|
||||
GENERATE_GUIDS = "True"
|
||||
|
||||
## Generate locations
|
||||
## Generating locations is a slow process, so this is a way to speed up the process
|
||||
## by using the guid only, useful when using different files on multiple servers
|
||||
GENERATE_LOCATIONS = "True"
|
||||
|
||||
## Map usernames between servers in the event that they are different, order does not matter
|
||||
## Comma seperated for multiple options
|
||||
USER_MAPPING = {"JellyUser":"jellyplex_watched"}
|
||||
|
||||
## Map libraries between servers in the even that they are different, order does not matter
|
||||
## Comma seperated for multiple options
|
||||
LIBRARY_MAPPING = { "Shows": "TV Shows" }
|
||||
|
||||
|
||||
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
|
||||
## Comma seperated for multiple options
|
||||
#BLACKLIST_LIBRARY = ""
|
||||
#WHITELIST_LIBRARY = "Movies"
|
||||
#BLACKLIST_LIBRARY_TYPE = "Series"
|
||||
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
|
||||
#BLACKLIST_USERS = ""
|
||||
WHITELIST_USERS = "jellyplex_watched"
|
||||
|
||||
|
||||
|
||||
# Plex
|
||||
|
||||
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
||||
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma seperated list for multiple servers
|
||||
PLEX_BASEURL = "http://localhost:32400"
|
||||
|
||||
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
||||
## Comma seperated list for multiple servers
|
||||
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"
|
||||
|
||||
## If not using plex token then use username and password of the server admin along with the servername
|
||||
## Comma seperated for multiple options
|
||||
#PLEX_USERNAME = "PlexUser, PlexUser2"
|
||||
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
|
||||
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"
|
||||
|
||||
## Skip hostname validation for ssl certificates.
|
||||
## Set to True if running into ssl certificate errors
|
||||
SSL_BYPASS = "True"
|
||||
|
||||
# Jellyfin
|
||||
|
||||
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma seperated list for multiple servers
|
||||
JELLYFIN_BASEURL = "http://localhost:8096"
|
||||
|
||||
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
||||
## Comma seperated list for multiple servers
|
||||
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
||||
|
||||
# Emby
|
||||
|
||||
## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma seperated list for multiple servers
|
||||
EMBY_BASEURL = "http://localhost:8097"
|
||||
|
||||
## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
|
||||
## Comma seperated list for multiple servers
|
||||
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"
|
||||
|
||||
|
||||
# Syncing Options
|
||||
|
||||
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
||||
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
||||
SYNC_FROM_PLEX_TO_JELLYFIN = "False"
|
||||
SYNC_FROM_PLEX_TO_PLEX = "False"
|
||||
SYNC_FROM_PLEX_TO_EMBY = "False"
|
||||
|
||||
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_EMBY = "True"
|
||||
|
||||
SYNC_FROM_EMBY_TO_PLEX = "False"
|
||||
SYNC_FROM_EMBY_TO_JELLYFIN = "False"
|
||||
SYNC_FROM_EMBY_TO_EMBY = "False"
|
||||
test/ci_locations.env (new file, 116 lines)
@@ -0,0 +1,116 @@
|
||||
# Global Settings
|
||||
|
||||
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
|
||||
DRYRUN = "True"
|
||||
|
||||
## Additional logging information
|
||||
DEBUG = "True"
|
||||
|
||||
## Debugging level, "info" is default, "debug" is more verbose
|
||||
DEBUG_LEVEL = "debug"
|
||||
|
||||
## If set to true then the script will only run once and then exit
|
||||
RUN_ONLY_ONCE = "True"
|
||||
|
||||
## How often to run the script in seconds
|
||||
SLEEP_DURATION = 10
|
||||
|
||||
## Log file where all output will be written to
|
||||
LOG_FILE = "log.log"
|
||||
|
||||
## Mark file where all shows/movies that have been marked as played will be written to
|
||||
MARK_FILE = "mark.log"
|
||||
|
||||
## Timeout for requests for jellyfin
|
||||
REQUEST_TIMEOUT = 300
|
||||
|
||||
## Max threads for processing
|
||||
MAX_THREADS = 2
|
||||
|
||||
## Generate guids
|
||||
## Generating guids is a slow process, so this is a way to speed up the process
|
||||
# by using the location only, useful when using same files on multiple servers
|
||||
GENERATE_GUIDS = "False"
|
||||
|
||||
## Generate locations
|
||||
## Generating locations is a slow process, so this is a way to speed up the process
|
||||
## by using the guid only, useful when using different files on multiple servers
|
||||
GENERATE_LOCATIONS = "True"
|
||||
|
||||
## Map usernames between servers in the event that they are different, order does not matter
|
||||
## Comma seperated for multiple options
|
||||
USER_MAPPING = {"JellyUser":"jellyplex_watched"}
|
||||
|
||||
## Map libraries between servers in the even that they are different, order does not matter
|
||||
## Comma seperated for multiple options
|
||||
LIBRARY_MAPPING = { "Shows": "TV Shows" }
|
||||
|
||||
|
||||
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
|
||||
## Comma seperated for multiple options
|
||||
#BLACKLIST_LIBRARY = ""
|
||||
#WHITELIST_LIBRARY = "Movies"
|
||||
#BLACKLIST_LIBRARY_TYPE = "Series"
|
||||
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
|
||||
#BLACKLIST_USERS = ""
|
||||
WHITELIST_USERS = "jellyplex_watched"
|
||||
|
||||
|
||||
|
||||
# Plex
|
||||
|
||||
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
||||
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma seperated list for multiple servers
|
||||
PLEX_BASEURL = "http://localhost:32400"
|
||||
|
||||
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
||||
## Comma seperated list for multiple servers
|
||||
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"
|
||||
|
||||
## If not using plex token then use username and password of the server admin along with the servername
|
||||
## Comma seperated for multiple options
|
||||
#PLEX_USERNAME = "PlexUser, PlexUser2"
|
||||
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
|
||||
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"
|
||||
|
||||
## Skip hostname validation for ssl certificates.
|
||||
## Set to True if running into ssl certificate errors
|
||||
SSL_BYPASS = "True"
|
||||
|
||||
# Jellyfin
|
||||
|
||||
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma seperated list for multiple servers
|
||||
JELLYFIN_BASEURL = "http://localhost:8096"
|
||||
|
||||
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
||||
## Comma seperated list for multiple servers
|
||||
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
||||
|
||||
# Emby
|
||||
|
||||
## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma seperated list for multiple servers
|
||||
EMBY_BASEURL = "http://localhost:8097"
|
||||
|
||||
## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
|
||||
## Comma seperated list for multiple servers
|
||||
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"
|
||||
|
||||
|
||||
# Syncing Options
|
||||
|
||||
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
||||
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
||||
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_PLEX_TO_PLEX = "True"
|
||||
SYNC_FROM_PLEX_TO_EMBY = "True"
|
||||
|
||||
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_EMBY = "True"
|
||||
|
||||
SYNC_FROM_EMBY_TO_PLEX = "True"
|
||||
SYNC_FROM_EMBY_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_EMBY_TO_EMBY = "True"
|
||||
test/ci_plex.env (new file, 116 lines)
@@ -0,0 +1,116 @@
|
||||
# Global Settings
|
||||
|
||||
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
|
||||
DRYRUN = "True"
|
||||
|
||||
## Additional logging information
|
||||
DEBUG = "True"
|
||||
|
||||
## Debugging level, "info" is default, "debug" is more verbose
|
||||
DEBUG_LEVEL = "debug"
|
||||
|
||||
## If set to true then the script will only run once and then exit
|
||||
RUN_ONLY_ONCE = "True"
|
||||
|
||||
## How often to run the script in seconds
|
||||
SLEEP_DURATION = 10
|
||||
|
||||
## Log file where all output will be written to
|
||||
LOG_FILE = "log.log"
|
||||
|
||||
## Mark file where all shows/movies that have been marked as played will be written to
|
||||
MARK_FILE = "mark.log"
|
||||
|
||||
## Timeout for requests for jellyfin
|
||||
REQUEST_TIMEOUT = 300
|
||||
|
||||
## Max threads for processing
|
||||
MAX_THREADS = 2
|
||||
|
||||
## Generate guids
|
||||
## Generating guids is a slow process, so this is a way to speed up the process
|
||||
# by using the location only, useful when using same files on multiple servers
|
||||
GENERATE_GUIDS = "True"
|
||||
|
||||
## Generate locations
|
||||
## Generating locations is a slow process, so this is a way to speed up the process
|
||||
## by using the guid only, useful when using different files on multiple servers
|
||||
GENERATE_LOCATIONS = "True"
|
||||
|
||||
## Map usernames between servers in the event that they are different, order does not matter
|
||||
## Comma seperated for multiple options
|
||||
USER_MAPPING = {"JellyUser":"jellyplex_watched"}
|
||||
|
||||
## Map libraries between servers in the even that they are different, order does not matter
|
||||
## Comma seperated for multiple options
|
||||
LIBRARY_MAPPING = { "Shows": "TV Shows" }
|
||||
|
||||
|
||||
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
|
||||
## Comma seperated for multiple options
|
||||
#BLACKLIST_LIBRARY = ""
|
||||
#WHITELIST_LIBRARY = "Movies"
|
||||
#BLACKLIST_LIBRARY_TYPE = "Series"
|
||||
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
|
||||
#BLACKLIST_USERS = ""
|
||||
WHITELIST_USERS = "jellyplex_watched"
|
||||
|
||||
|
||||
|
||||
# Plex
|
||||
|
||||
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
||||
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma seperated list for multiple servers
|
||||
PLEX_BASEURL = "http://localhost:32400"
|
||||
|
||||
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
||||
## Comma seperated list for multiple servers
|
||||
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"
|
||||
|
||||
## If not using plex token then use username and password of the server admin along with the servername
|
||||
## Comma seperated for multiple options
|
||||
#PLEX_USERNAME = "PlexUser, PlexUser2"
|
||||
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
|
||||
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"
|
||||
|
||||
## Skip hostname validation for ssl certificates.
|
||||
## Set to True if running into ssl certificate errors
|
||||
SSL_BYPASS = "True"
|
||||
|
||||
# Jellyfin
|
||||
|
||||
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma seperated list for multiple servers
|
||||
JELLYFIN_BASEURL = "http://localhost:8096"
|
||||
|
||||
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
||||
## Comma seperated list for multiple servers
|
||||
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
||||
|
||||
# Emby
|
||||
|
||||
## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma seperated list for multiple servers
|
||||
EMBY_BASEURL = "http://localhost:8097"
|
||||
|
||||
## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
|
||||
## Comma seperated list for multiple servers
|
||||
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"
|
||||
|
||||
|
||||
# Syncing Options
|
||||
|
||||
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
||||
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
||||
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_PLEX_TO_PLEX = "True"
|
||||
SYNC_FROM_PLEX_TO_EMBY = "True"
|
||||
|
||||
SYNC_FROM_JELLYFIN_TO_PLEX = "False"
|
||||
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "False"
|
||||
SYNC_FROM_JELLYFIN_TO_EMBY = "False"
|
||||
|
||||
SYNC_FROM_EMBY_TO_PLEX = "False"
|
||||
SYNC_FROM_EMBY_TO_JELLYFIN = "False"
|
||||
SYNC_FROM_EMBY_TO_EMBY = "False"
|
||||
test/ci_write.env (new file, 116 lines)
@@ -0,0 +1,116 @@
|
||||
# Global Settings
|
||||
|
||||
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
|
||||
DRYRUN = "False"
|
||||
|
||||
## Additional logging information
|
||||
DEBUG = "True"
|
||||
|
||||
## Debugging level, "info" is default, "debug" is more verbose
|
||||
DEBUG_LEVEL = "debug"
|
||||
|
||||
## If set to true then the script will only run once and then exit
|
||||
RUN_ONLY_ONCE = "True"
|
||||
|
||||
## How often to run the script in seconds
|
||||
SLEEP_DURATION = 10
|
||||
|
||||
## Log file where all output will be written to
|
||||
LOG_FILE = "log.log"
|
||||
|
||||
## Mark file where all shows/movies that have been marked as played will be written to
|
||||
MARK_FILE = "mark.log"
|
||||
|
||||
## Timeout for requests for jellyfin
|
||||
REQUEST_TIMEOUT = 300
|
||||
|
||||
## Max threads for processing
|
||||
MAX_THREADS = 2
|
||||
|
||||
## Generate guids
|
||||
## Generating guids is a slow process, so this is a way to speed up the process
|
||||
# by using the location only, useful when using same files on multiple servers
|
||||
GENERATE_GUIDS = "True"
|
||||
|
||||
## Generate locations
|
||||
## Generating locations is a slow process, so this is a way to speed up the process
|
||||
## by using the guid only, useful when using different files on multiple servers
|
||||
GENERATE_LOCATIONS = "True"
|
||||
|
||||
## Map usernames between servers in the event that they are different, order does not matter
|
||||
## Comma seperated for multiple options
|
||||
USER_MAPPING = {"JellyUser":"jellyplex_watched"}
|
||||
|
||||
## Map libraries between servers in the even that they are different, order does not matter
|
||||
## Comma seperated for multiple options
|
||||
LIBRARY_MAPPING = { "Shows": "TV Shows" }
|
||||
|
||||
|
||||
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
|
||||
## Comma seperated for multiple options
|
||||
#BLACKLIST_LIBRARY = ""
|
||||
#WHITELIST_LIBRARY = "Movies"
|
||||
#BLACKLIST_LIBRARY_TYPE = "Series"
|
||||
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
|
||||
#BLACKLIST_USERS = ""
|
||||
WHITELIST_USERS = "jellyplex_watched"
|
||||
|
||||
|
||||
|
||||
# Plex
|
||||
|
||||
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
||||
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma seperated list for multiple servers
|
||||
PLEX_BASEURL = "http://localhost:32400"
|
||||
|
||||
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
||||
## Comma seperated list for multiple servers
|
||||
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"
|
||||
|
||||
## If not using plex token then use username and password of the server admin along with the servername
|
||||
## Comma seperated for multiple options
|
||||
#PLEX_USERNAME = "PlexUser, PlexUser2"
|
||||
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
|
||||
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"
|
||||
|
||||
## Skip hostname validation for ssl certificates.
|
||||
## Set to True if running into ssl certificate errors
|
||||
SSL_BYPASS = "True"
|
||||
|
||||
# Jellyfin
|
||||
|
||||
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma seperated list for multiple servers
|
||||
JELLYFIN_BASEURL = "http://localhost:8096"
|
||||
|
||||
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
||||
## Comma seperated list for multiple servers
|
||||
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
||||
|
||||
# Emby
|
||||
|
||||
## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma seperated list for multiple servers
|
||||
EMBY_BASEURL = "http://localhost:8097"
|
||||
|
||||
## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
|
||||
## Comma seperated list for multiple servers
|
||||
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"
|
||||
|
||||
|
||||
# Syncing Options
|
||||
|
||||
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
||||
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
||||
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_PLEX_TO_PLEX = "True"
|
||||
SYNC_FROM_PLEX_TO_EMBY = "True"
|
||||
|
||||
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_EMBY = "True"
|
||||
|
||||
SYNC_FROM_EMBY_TO_PLEX = "True"
|
||||
SYNC_FROM_EMBY_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_EMBY_TO_EMBY = "True"
|
||||
@@ -1 +1 @@
-pytest
+pytest==7.3.0
test/test_black_white.py (new file, 78 lines)
@@ -0,0 +1,78 @@
|
||||
import sys
|
||||
import os
|
||||
|
||||
# getting the name of the directory
|
||||
# where the this file is present.
|
||||
current = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Getting the parent directory name
|
||||
# where the current directory is present.
|
||||
parent = os.path.dirname(current)
|
||||
|
||||
# adding the parent directory to
|
||||
# the sys.path.
|
||||
sys.path.append(parent)
|
||||
|
||||
from src.black_white import setup_black_white_lists
|
||||
|
||||
|
||||
def test_setup_black_white_lists():
|
||||
# Simple
|
||||
blacklist_library = "library1, library2"
|
||||
whitelist_library = "library1, library2"
|
||||
blacklist_library_type = "library_type1, library_type2"
|
||||
whitelist_library_type = "library_type1, library_type2"
|
||||
blacklist_users = "user1, user2"
|
||||
whitelist_users = "user1, user2"
|
||||
|
||||
(
|
||||
results_blacklist_library,
|
||||
return_whitelist_library,
|
||||
return_blacklist_library_type,
|
||||
return_whitelist_library_type,
|
||||
return_blacklist_users,
|
||||
return_whitelist_users,
|
||||
) = setup_black_white_lists(
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
blacklist_users,
|
||||
whitelist_users,
|
||||
)
|
||||
|
||||
assert results_blacklist_library == ["library1", "library2"]
|
||||
assert return_whitelist_library == ["library1", "library2"]
|
||||
assert return_blacklist_library_type == ["library_type1", "library_type2"]
|
||||
assert return_whitelist_library_type == ["library_type1", "library_type2"]
|
||||
assert return_blacklist_users == ["user1", "user2"]
|
||||
assert return_whitelist_users == ["user1", "user2"]
|
||||
|
||||
# Library Mapping and user mapping
|
||||
library_mapping = {"library1": "library3"}
|
||||
user_mapping = {"user1": "user3"}
|
||||
|
||||
(
|
||||
results_blacklist_library,
|
||||
return_whitelist_library,
|
||||
return_blacklist_library_type,
|
||||
return_whitelist_library_type,
|
||||
return_blacklist_users,
|
||||
return_whitelist_users,
|
||||
) = setup_black_white_lists(
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
blacklist_users,
|
||||
whitelist_users,
|
||||
library_mapping,
|
||||
user_mapping,
|
||||
)
|
||||
|
||||
assert results_blacklist_library == ["library1", "library2", "library3"]
|
||||
assert return_whitelist_library == ["library1", "library2", "library3"]
|
||||
assert return_blacklist_library_type == ["library_type1", "library_type2"]
|
||||
assert return_whitelist_library_type == ["library_type1", "library_type2"]
|
||||
assert return_blacklist_users == ["user1", "user2", "user3"]
|
||||
assert return_whitelist_users == ["user1", "user2", "user3"]
|
||||
test/test_library.py (new file, 324 lines)
@@ -0,0 +1,324 @@
|
||||
import sys
|
||||
import os
|
||||
|
||||
# getting the name of the directory
|
||||
# where the this file is present.
|
||||
current = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Getting the parent directory name
|
||||
# where the current directory is present.
|
||||
parent = os.path.dirname(current)
|
||||
|
||||
# adding the parent directory to
|
||||
# the sys.path.
|
||||
sys.path.append(parent)
|
||||
|
||||
from src.functions import (
|
||||
search_mapping,
|
||||
)
|
||||
|
||||
from src.library import (
|
||||
check_skip_logic,
|
||||
check_blacklist_logic,
|
||||
check_whitelist_logic,
|
||||
show_title_dict,
|
||||
episode_title_dict,
|
||||
movies_title_dict,
|
||||
generate_library_guids_dict,
|
||||
)
|
||||
|
||||
blacklist_library = ["TV Shows"]
|
||||
whitelist_library = ["Movies"]
|
||||
blacklist_library_type = ["episodes"]
|
||||
whitelist_library_type = ["movies"]
|
||||
library_mapping = {"Shows": "TV Shows", "Movie": "Movies"}
|
||||
|
||||
show_list = {
|
||||
frozenset(
|
||||
{
|
||||
("locations", ("The Last of Us",)),
|
||||
("tmdb", "100088"),
|
||||
("imdb", "tt3581920"),
|
||||
("tvdb", "392256"),
|
||||
("title", "The Last of Us"),
|
||||
}
|
||||
): [
|
||||
{
|
||||
"imdb": "tt11957006",
|
||||
"tmdb": "2181581",
|
||||
"tvdb": "8444132",
|
||||
"locations": (
|
||||
(
|
||||
"The Last of Us - S01E01 - When You're Lost in the Darkness WEBDL-1080p.mkv",
|
||||
)
|
||||
),
|
||||
"status": {"completed": True, "time": 0},
|
||||
}
|
||||
]
|
||||
}
|
||||
movie_list = [
|
||||
{
|
||||
"title": "Coco",
|
||||
"imdb": "tt2380307",
|
||||
"tmdb": "354912",
|
||||
"locations": [("Coco (2017) Remux-2160p.mkv", "Coco (2017) Remux-1080p.mkv")],
|
||||
"status": {"completed": True, "time": 0},
|
||||
}
|
||||
]
|
||||
|
||||
show_titles = {
|
||||
"imdb": ["tt3581920"],
|
||||
"locations": [("The Last of Us",)],
|
||||
"tmdb": ["100088"],
|
||||
"tvdb": ["392256"],
|
||||
}
|
||||
episode_titles = {
|
||||
"imdb": ["tt11957006"],
|
||||
"locations": [
|
||||
("The Last of Us - S01E01 - When You're Lost in the Darkness WEBDL-1080p.mkv",)
|
||||
],
|
||||
"tmdb": ["2181581"],
|
||||
"tvdb": ["8444132"],
|
||||
"completed": [True],
|
||||
"time": [0],
|
||||
"show": [
|
||||
{
|
||||
"imdb": "tt3581920",
|
||||
"locations": ("The Last of Us",),
|
||||
"title": "The Last of Us",
|
||||
"tmdb": "100088",
|
||||
"tvdb": "392256",
|
||||
}
|
||||
],
|
||||
}
|
||||
movie_titles = {
|
||||
"imdb": ["tt2380307"],
|
||||
"locations": [
|
||||
[
|
||||
(
|
||||
"Coco (2017) Remux-2160p.mkv",
|
||||
"Coco (2017) Remux-1080p.mkv",
|
||||
)
|
||||
]
|
||||
],
|
||||
"title": ["coco"],
|
||||
"tmdb": ["354912"],
|
||||
"completed": [True],
|
||||
"time": [0],
|
||||
}
|
||||
|
||||
|
||||
def test_check_skip_logic():
|
||||
# Failes
|
||||
library_title = "Test"
|
||||
library_type = "movies"
|
||||
skip_reason = check_skip_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
)
|
||||
|
||||
assert skip_reason == "Test is not in whitelist_library"
|
||||
|
||||
library_title = "Shows"
|
||||
library_type = "episodes"
|
||||
skip_reason = check_skip_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
)
|
||||
|
||||
assert (
|
||||
skip_reason
|
||||
== "episodes is in blacklist_library_type and TV Shows is in blacklist_library and "
|
||||
+ "episodes is not in whitelist_library_type and Shows is not in whitelist_library"
|
||||
)
|
||||
|
||||
# Passes
|
||||
library_title = "Movie"
|
||||
library_type = "movies"
|
||||
skip_reason = check_skip_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
)
|
||||
|
||||
assert skip_reason is None
|
||||
|
||||
|
||||
def test_check_blacklist_logic():
|
||||
# Fails
|
||||
library_title = "Shows"
|
||||
library_type = "episodes"
|
||||
library_other = search_mapping(library_mapping, library_title)
|
||||
skip_reason = check_blacklist_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
blacklist_library,
|
||||
blacklist_library_type,
|
||||
library_other,
|
||||
)
|
||||
|
||||
assert (
|
||||
skip_reason
|
||||
== "episodes is in blacklist_library_type and TV Shows is in blacklist_library"
|
||||
)
|
||||
|
||||
library_title = "TV Shows"
|
||||
library_type = "episodes"
|
||||
library_other = search_mapping(library_mapping, library_title)
|
||||
skip_reason = check_blacklist_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
blacklist_library,
|
||||
blacklist_library_type,
|
||||
library_other,
|
||||
)
|
||||
|
||||
assert (
|
||||
skip_reason
|
||||
== "episodes is in blacklist_library_type and TV Shows is in blacklist_library"
|
||||
)
|
||||
|
||||
# Passes
|
||||
library_title = "Movie"
|
||||
library_type = "movies"
|
||||
library_other = search_mapping(library_mapping, library_title)
|
||||
skip_reason = check_blacklist_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
blacklist_library,
|
||||
blacklist_library_type,
|
||||
library_other,
|
||||
)
|
||||
|
||||
assert skip_reason is None
|
||||
|
||||
library_title = "Movies"
|
||||
library_type = "movies"
|
||||
library_other = search_mapping(library_mapping, library_title)
|
||||
skip_reason = check_blacklist_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
blacklist_library,
|
||||
blacklist_library_type,
|
||||
library_other,
|
||||
)
|
||||
|
||||
assert skip_reason is None
|
||||
|
||||
|
||||
def test_check_whitelist_logic():
|
||||
# Fails
|
||||
library_title = "Shows"
|
||||
library_type = "episodes"
|
||||
library_other = search_mapping(library_mapping, library_title)
|
||||
skip_reason = check_whitelist_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
whitelist_library,
|
||||
whitelist_library_type,
|
||||
library_other,
|
||||
)
|
||||
|
||||
assert (
|
||||
skip_reason
|
||||
== "episodes is not in whitelist_library_type and Shows is not in whitelist_library"
|
||||
)
|
||||
|
||||
library_title = "TV Shows"
|
||||
library_type = "episodes"
|
||||
library_other = search_mapping(library_mapping, library_title)
|
||||
skip_reason = check_whitelist_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
whitelist_library,
|
||||
whitelist_library_type,
|
||||
library_other,
|
||||
)
|
||||
|
||||
assert (
|
||||
skip_reason
|
||||
== "episodes is not in whitelist_library_type and TV Shows is not in whitelist_library"
|
||||
)
|
||||
|
||||
# Passes
|
||||
library_title = "Movie"
|
||||
library_type = "movies"
|
||||
library_other = search_mapping(library_mapping, library_title)
|
||||
skip_reason = check_whitelist_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
whitelist_library,
|
||||
whitelist_library_type,
|
||||
library_other,
|
||||
)
|
||||
|
||||
assert skip_reason is None
|
||||
|
||||
library_title = "Movies"
|
||||
library_type = "movies"
|
||||
library_other = search_mapping(library_mapping, library_title)
|
||||
skip_reason = check_whitelist_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
whitelist_library,
|
||||
whitelist_library_type,
|
||||
library_other,
|
||||
)
|
||||
|
||||
assert skip_reason is None
|
||||
|
||||
|
||||
def test_show_title_dict():
|
||||
show_titles_dict = show_title_dict(show_list)
|
||||
|
||||
assert show_titles_dict == show_titles
|
||||
|
||||
|
||||
def test_episode_title_dict():
|
||||
episode_titles_dict = episode_title_dict(show_list)
|
||||
|
||||
assert episode_titles_dict == episode_titles
|
||||
|
||||
|
||||
def test_movies_title_dict():
|
||||
movies_titles_dict = movies_title_dict(movie_list)
|
||||
|
||||
assert movies_titles_dict == movie_titles
|
||||
|
||||
|
||||
def test_generate_library_guids_dict():
|
||||
# Test with shows
|
||||
(
|
||||
show_titles_dict,
|
||||
episode_titles_dict,
|
||||
movies_titles_dict,
|
||||
) = generate_library_guids_dict(show_list)
|
||||
|
||||
assert show_titles_dict == show_titles
|
||||
assert episode_titles_dict == episode_titles
|
||||
assert movies_titles_dict == {}
|
||||
|
||||
# Test with movies
|
||||
(
|
||||
show_titles_dict,
|
||||
episode_titles_dict,
|
||||
movies_titles_dict,
|
||||
) = generate_library_guids_dict(movie_list)
|
||||
|
||||
assert show_titles_dict == {}
|
||||
assert episode_titles_dict == {}
|
||||
assert movies_titles_dict == movie_titles
|
||||
@@ -1,78 +1,78 @@
|
||||
import sys
|
||||
import os
|
||||
|
||||
# getting the name of the directory
|
||||
# where the this file is present.
|
||||
current = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Getting the parent directory name
|
||||
# where the current directory is present.
|
||||
parent = os.path.dirname(current)
|
||||
|
||||
# adding the parent directory to
|
||||
# the sys.path.
|
||||
sys.path.append(parent)
|
||||
|
||||
from src.main import setup_black_white_lists
|
||||
|
||||
|
||||
def test_setup_black_white_lists():
|
||||
# Simple
|
||||
blacklist_library = "library1, library2"
|
||||
whitelist_library = "library1, library2"
|
||||
blacklist_library_type = "library_type1, library_type2"
|
||||
whitelist_library_type = "library_type1, library_type2"
|
||||
blacklist_users = "user1, user2"
|
||||
whitelist_users = "user1, user2"
|
||||
|
||||
(
|
||||
results_blacklist_library,
|
||||
return_whitelist_library,
|
||||
return_blacklist_library_type,
|
||||
return_whitelist_library_type,
|
||||
return_blacklist_users,
|
||||
return_whitelist_users,
|
||||
) = setup_black_white_lists(
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
blacklist_users,
|
||||
whitelist_users,
|
||||
)
|
||||
|
||||
assert results_blacklist_library == ["library1", "library2"]
|
||||
assert return_whitelist_library == ["library1", "library2"]
|
||||
assert return_blacklist_library_type == ["library_type1", "library_type2"]
|
||||
assert return_whitelist_library_type == ["library_type1", "library_type2"]
|
||||
assert return_blacklist_users == ["user1", "user2"]
|
||||
assert return_whitelist_users == ["user1", "user2"]
|
||||
|
||||
# Library Mapping and user mapping
|
||||
library_mapping = {"library1": "library3"}
|
||||
user_mapping = {"user1": "user3"}
|
||||
|
||||
(
|
||||
results_blacklist_library,
|
||||
return_whitelist_library,
|
||||
return_blacklist_library_type,
|
||||
return_whitelist_library_type,
|
||||
return_blacklist_users,
|
||||
return_whitelist_users,
|
||||
) = setup_black_white_lists(
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
blacklist_users,
|
||||
whitelist_users,
|
||||
library_mapping,
|
||||
user_mapping,
|
||||
)
|
||||
|
||||
assert results_blacklist_library == ["library1", "library2", "library3"]
|
||||
assert return_whitelist_library == ["library1", "library2", "library3"]
|
||||
assert return_blacklist_library_type == ["library_type1", "library_type2"]
|
||||
assert return_whitelist_library_type == ["library_type1", "library_type2"]
|
||||
assert return_blacklist_users == ["user1", "user2", "user3"]
|
||||
assert return_whitelist_users == ["user1", "user2", "user3"]
|
||||
import sys
|
||||
import os
|
||||
|
||||
# getting the name of the directory
|
||||
# where the this file is present.
|
||||
current = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Getting the parent directory name
|
||||
# where the current directory is present.
|
||||
parent = os.path.dirname(current)
|
||||
|
||||
# adding the parent directory to
|
||||
# the sys.path.
|
||||
sys.path.append(parent)
|
||||
|
||||
from src.black_white import setup_black_white_lists
|
||||
|
||||
|
||||
def test_setup_black_white_lists():
|
||||
# Simple
|
||||
blacklist_library = "library1, library2"
|
||||
whitelist_library = "library1, library2"
|
||||
blacklist_library_type = "library_type1, library_type2"
|
||||
whitelist_library_type = "library_type1, library_type2"
|
||||
blacklist_users = "user1, user2"
|
||||
whitelist_users = "user1, user2"
|
||||
|
||||
(
|
||||
results_blacklist_library,
|
||||
return_whitelist_library,
|
||||
return_blacklist_library_type,
|
||||
return_whitelist_library_type,
|
||||
return_blacklist_users,
|
||||
return_whitelist_users,
|
||||
) = setup_black_white_lists(
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
blacklist_users,
|
||||
whitelist_users,
|
||||
)
|
||||
|
||||
assert results_blacklist_library == ["library1", "library2"]
|
||||
assert return_whitelist_library == ["library1", "library2"]
|
||||
assert return_blacklist_library_type == ["library_type1", "library_type2"]
|
||||
assert return_whitelist_library_type == ["library_type1", "library_type2"]
|
||||
assert return_blacklist_users == ["user1", "user2"]
|
||||
assert return_whitelist_users == ["user1", "user2"]
|
||||
|
||||
# Library Mapping and user mapping
|
||||
library_mapping = {"library1": "library3"}
|
||||
user_mapping = {"user1": "user3"}
|
||||
|
||||
(
|
||||
results_blacklist_library,
|
||||
return_whitelist_library,
|
||||
return_blacklist_library_type,
|
||||
return_whitelist_library_type,
|
||||
return_blacklist_users,
|
||||
return_whitelist_users,
|
||||
) = setup_black_white_lists(
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
blacklist_users,
|
||||
whitelist_users,
|
||||
library_mapping,
|
||||
user_mapping,
|
||||
)
|
||||
|
||||
assert results_blacklist_library == ["library1", "library2", "library3"]
|
||||
assert return_whitelist_library == ["library1", "library2", "library3"]
|
||||
assert return_blacklist_library_type == ["library_type1", "library_type2"]
|
||||
assert return_whitelist_library_type == ["library_type1", "library_type2"]
|
||||
assert return_blacklist_users == ["user1", "user2", "user3"]
|
||||
assert return_whitelist_users == ["user1", "user2", "user3"]
|
||||
|
||||
@@ -1,301 +0,0 @@
|
||||
import sys
|
||||
import os
|
||||
|
||||
# getting the name of the directory
|
||||
# where the this file is present.
|
||||
current = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Getting the parent directory name
|
||||
# where the current directory is present.
|
||||
parent = os.path.dirname(current)
|
||||
|
||||
# adding the parent directory to
|
||||
# the sys.path.
|
||||
sys.path.append(parent)
|
||||
|
||||
from src.main import cleanup_watched
|
||||
|
||||
tv_shows_watched_list_1 = {
|
||||
frozenset(
|
||||
{
|
||||
("tvdb", "75710"),
|
||||
("title", "Criminal Minds"),
|
||||
("imdb", "tt0452046"),
|
||||
("locations", ("Criminal Minds",)),
|
||||
("tmdb", "4057"),
|
||||
}
|
||||
): {
|
||||
"Season 1": [
|
||||
{
|
||||
"imdb": "tt0550489",
|
||||
"tmdb": "282843",
|
||||
"tvdb": "176357",
|
||||
"locations": (
|
||||
"Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",
|
||||
),
|
||||
},
|
||||
{
|
||||
"imdb": "tt0550487",
|
||||
"tmdb": "282861",
|
||||
"tvdb": "300385",
|
||||
"locations": ("Criminal Minds S01E02 Compulsion WEBDL-720p.mkv",),
|
||||
},
|
||||
]
|
||||
},
|
||||
frozenset({("title", "Test"), ("locations", ("Test",))}): {
|
||||
"Season 1": [
|
||||
{"locations": ("Test S01E01.mkv",)},
|
||||
{"locations": ("Test S01E02.mkv",)},
|
||||
]
|
||||
},
|
||||
}
|
||||
|
||||
movies_watched_list_1 = [
|
||||
{
|
||||
"imdb": "tt2380307",
|
||||
"tmdb": "354912",
|
||||
"title": "Coco",
|
||||
"locations": ("Coco (2017) Remux-1080p.mkv",),
|
||||
},
|
||||
{
|
||||
"tmdbcollection": "448150",
|
||||
"imdb": "tt1431045",
|
||||
"tmdb": "293660",
|
||||
"title": "Deadpool",
|
||||
"locations": ("Deadpool (2016) Remux-1080p.mkv",),
|
||||
},
|
||||
]
|
||||
|
||||
tv_shows_watched_list_2 = {
|
||||
frozenset(
|
||||
{
|
||||
("tvdb", "75710"),
|
||||
("title", "Criminal Minds"),
|
||||
("imdb", "tt0452046"),
|
||||
("locations", ("Criminal Minds",)),
|
||||
("tmdb", "4057"),
|
||||
}
|
||||
): {
|
||||
"Season 1": [
|
||||
{
|
||||
"imdb": "tt0550487",
|
||||
"tmdb": "282861",
|
||||
"tvdb": "300385",
|
||||
"locations": ("Criminal Minds S01E02 Compulsion WEBDL-720p.mkv",),
|
||||
},
|
||||
{
|
||||
"imdb": "tt0550498",
|
||||
"tmdb": "282865",
|
||||
"tvdb": "300474",
|
||||
"locations": (
|
||||
"Criminal Minds S01E03 Won't Get Fooled Again WEBDL-720p.mkv",
|
||||
),
|
||||
},
|
||||
]
|
||||
},
|
||||
frozenset({("title", "Test"), ("locations", ("Test",))}): {
|
||||
"Season 1": [
|
||||
{"locations": ("Test S01E02.mkv",)},
|
||||
{"locations": ("Test S01E03.mkv",)},
|
||||
]
|
||||
},
|
||||
}
|
||||
|
||||
movies_watched_list_2 = [
|
||||
{
|
||||
"imdb": "tt2380307",
|
||||
"tmdb": "354912",
|
||||
"title": "Coco",
|
||||
"locations": ("Coco (2017) Remux-1080p.mkv",),
|
||||
},
|
||||
{
|
||||
"imdb": "tt0384793",
|
||||
"tmdb": "9788",
|
||||
"tvdb": "9103",
|
||||
"title": "Accepted",
|
||||
"locations": ("Accepted (2006) Remux-1080p.mkv",),
|
||||
},
|
||||
]
|
||||
|
||||
# Test to see if objects get deleted all the way up to the root.
|
||||
tv_shows_2_watched_list_1 = {
|
||||
frozenset(
|
||||
{
|
||||
("tvdb", "75710"),
|
||||
("title", "Criminal Minds"),
|
||||
("imdb", "tt0452046"),
|
||||
("locations", ("Criminal Minds",)),
|
||||
("tmdb", "4057"),
|
||||
}
|
||||
): {
|
||||
"Season 1": [
|
||||
{
|
||||
"imdb": "tt0550489",
|
||||
"tmdb": "282843",
|
||||
"tvdb": "176357",
|
||||
"locations": (
|
||||
"Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",
|
||||
),
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
expected_tv_show_watched_list_1 = {
    frozenset(
        {
            ("tvdb", "75710"),
            ("title", "Criminal Minds"),
            ("imdb", "tt0452046"),
            ("locations", ("Criminal Minds",)),
            ("tmdb", "4057"),
        }
    ): {
        "Season 1": [
            {
                "imdb": "tt0550489",
                "tmdb": "282843",
                "tvdb": "176357",
                "locations": (
                    "Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",
                ),
            }
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [{"locations": ("Test S01E01.mkv",)}]
    },
}

expected_movie_watched_list_1 = [
    {
        "tmdbcollection": "448150",
        "imdb": "tt1431045",
        "tmdb": "293660",
        "title": "Deadpool",
        "locations": ("Deadpool (2016) Remux-1080p.mkv",),
    }
]

expected_tv_show_watched_list_2 = {
    frozenset(
        {
            ("tvdb", "75710"),
            ("title", "Criminal Minds"),
            ("imdb", "tt0452046"),
            ("locations", ("Criminal Minds",)),
            ("tmdb", "4057"),
        }
    ): {
        "Season 1": [
            {
                "imdb": "tt0550498",
                "tmdb": "282865",
                "tvdb": "300474",
                "locations": (
                    "Criminal Minds S01E03 Won't Get Fooled Again WEBDL-720p.mkv",
                ),
            }
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [{"locations": ("Test S01E03.mkv",)}]
    },
}

expected_movie_watched_list_2 = [
    {
        "imdb": "tt0384793",
        "tmdb": "9788",
        "tvdb": "9103",
        "title": "Accepted",
        "locations": ("Accepted (2006) Remux-1080p.mkv",),
    }
]


def test_simple_cleanup_watched():
    user_watched_list_1 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_1,
            "Movies": movies_watched_list_1,
            "Other Shows": tv_shows_2_watched_list_1,
        },
    }
    user_watched_list_2 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_2,
            "Movies": movies_watched_list_2,
            "Other Shows": tv_shows_2_watched_list_1,
        }
    }

    expected_watched_list_1 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_1,
            "Movies": expected_movie_watched_list_1,
        }
    }

    expected_watched_list_2 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_2,
            "Movies": expected_movie_watched_list_2,
        }
    }

    return_watched_list_1 = cleanup_watched(user_watched_list_1, user_watched_list_2)
    return_watched_list_2 = cleanup_watched(user_watched_list_2, user_watched_list_1)

    assert return_watched_list_1 == expected_watched_list_1
    assert return_watched_list_2 == expected_watched_list_2


def test_mapping_cleanup_watched():
    user_watched_list_1 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_1,
            "Movies": movies_watched_list_1,
            "Other Shows": tv_shows_2_watched_list_1,
        },
    }
    user_watched_list_2 = {
        "user2": {
            "Shows": tv_shows_watched_list_2,
            "Movies": movies_watched_list_2,
            "Other Shows": tv_shows_2_watched_list_1,
        }
    }

    expected_watched_list_1 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_1,
            "Movies": expected_movie_watched_list_1,
        }
    }

    expected_watched_list_2 = {
        "user2": {
            "Shows": expected_tv_show_watched_list_2,
            "Movies": expected_movie_watched_list_2,
        }
    }

    user_mapping = {"user1": "user2"}
    library_mapping = {"TV Shows": "Shows"}

    return_watched_list_1 = cleanup_watched(
        user_watched_list_1,
        user_watched_list_2,
        user_mapping=user_mapping,
        library_mapping=library_mapping,
    )
    return_watched_list_2 = cleanup_watched(
        user_watched_list_2,
        user_watched_list_1,
        user_mapping=user_mapping,
        library_mapping=library_mapping,
    )

    assert return_watched_list_1 == expected_watched_list_1
    assert return_watched_list_2 == expected_watched_list_2
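For readers skimming these fixtures, here is a minimal sketch of the idea the assertions above exercise: cleanup_watched keeps, for each user and library, only the entries one server has marked watched that the other has not, and drops a library entirely when nothing is left (hence the disappearing "Other Shows"). This flat-list version is an illustration only, assuming entries are compared by their "locations" tuple; it is not the implementation in src/watched.py.

# Illustration only, not src.watched.cleanup_watched: a flat-list sketch of the
# behaviour the tests above assert, comparing entries by their "locations" tuple.
def cleanup_flat(list_1, list_2):
    seen = {item["locations"] for item in list_2}
    return [item for item in list_1 if item["locations"] not in seen]

# e.g. cleanup_flat(movies_watched_list_1, movies_watched_list_2) would keep only
# the movies the second server has not watched yet.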
39 test/test_users.py Normal file
@@ -0,0 +1,39 @@
import sys
import os

# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)

from src.users import (
    combine_user_lists,
    filter_user_lists,
)


def test_combine_user_lists():
    server_1_users = ["test", "test3", "luigi311"]
    server_2_users = ["luigi311", "test2", "test3"]
    user_mapping = {"test2": "test"}

    combined = combine_user_lists(server_1_users, server_2_users, user_mapping)

    assert combined == {"luigi311": "luigi311", "test": "test2", "test3": "test3"}


def test_filter_user_lists():
    users = {"luigi311": "luigi311", "test": "test2", "test3": "test3"}
    blacklist_users = ["test3"]
    whitelist_users = ["test", "luigi311"]

    filtered = filter_user_lists(users, blacklist_users, whitelist_users)

    assert filtered == {"test": "test2", "luigi311": "luigi311"}
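As a reading aid, a minimal sketch of the pairing behaviour the combine_user_lists assertion above implies, assuming user_mapping may be written in either direction; the real implementation lives in src/users.py and this is not it.

# Illustration only: pair each server-1 user with its server-2 counterpart,
# honouring an optional user_mapping and keeping only users present on both servers.
def combine_user_lists_sketch(server_1_users, server_2_users, user_mapping=None):
    user_mapping = user_mapping or {}
    # The mapping may be given in either direction, so build a lookup both ways.
    lookup = {**user_mapping, **{v: k for k, v in user_mapping.items()}}
    combined = {}
    for user in server_1_users:
        if user in server_2_users:
            combined[user] = user
        elif lookup.get(user) in server_2_users:
            combined[user] = lookup[user]
    return combined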
543 test/test_watched.py Normal file
@@ -0,0 +1,543 @@
import sys
import os

# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)

from src.watched import cleanup_watched

tv_shows_watched_list_1 = {
    frozenset(
        {
            ("locations", ("Doctor Who (2005) {tvdb-78804} {imdb-tt0436992}",)),
            ("imdb", "tt0436992"),
            ("tmdb", "57243"),
            ("tvdb", "78804"),
            ("title", "Doctor Who (2005)"),
        }
    ): [
        {
            "imdb": "tt0563001",
            "tmdb": "968589",
            "tvdb": "295296",
            "title": "The Unquiet Dead",
            "locations": ("S01E03.mkv",),
            "status": {"completed": True, "time": 0},
        },
        {
            "imdb": "tt0562985",
            "tmdb": "968590",
            "tvdb": "295297",
            "title": "Aliens of London (1)",
            "locations": ("S01E04.mkv",),
            "status": {"completed": False, "time": 240000},
        },
        {
            "imdb": "tt0563003",
            "tmdb": "968592",
            "tvdb": "295298",
            "title": "World War Three (2)",
            "locations": ("S01E05.mkv",),
            "status": {"completed": True, "time": 0},
        },
    ],
    frozenset(
        {
            ("title", "Monarch: Legacy of Monsters"),
            ("imdb", "tt17220216"),
            ("tvdb", "422598"),
            ("tmdb", "202411"),
            (
                "locations",
                ("Monarch - Legacy of Monsters {tvdb-422598} {imdb-tt17220216}",),
            ),
        }
    ): [
        {
            "imdb": "tt21255044",
            "tmdb": "4661246",
            "tvdb": "10009418",
            "title": "Secrets and Lies",
            "locations": ("S01E03.mkv",),
            "status": {"completed": True, "time": 0},
        },
        {
            "imdb": "tt21255050",
            "tmdb": "4712059",
            "tvdb": "10009419",
            "title": "Parallels and Interiors",
            "locations": ("S01E04.mkv",),
            "status": {"completed": False, "time": 240000},
        },
        {
            "imdb": "tt23787572",
            "tmdb": "4712061",
            "tvdb": "10009420",
            "title": "The Way Out",
            "locations": ("S01E05.mkv",),
            "status": {"completed": True, "time": 0},
        },
    ],
    frozenset(
        {
            ("tmdb", "125928"),
            ("imdb", "tt14681924"),
            ("tvdb", "403172"),
            (
                "locations",
                ("My Adventures with Superman {tvdb-403172} {imdb-tt14681924}",),
            ),
            ("title", "My Adventures with Superman"),
        }
    ): [
        {
            "imdb": "tt15699926",
            "tmdb": "3070048",
            "tvdb": "8438181",
            "title": "Adventures of a Normal Man (1)",
            "locations": ("S01E01.mkv",),
            "status": {"completed": True, "time": 0},
        },
        {
            "imdb": "tt20413322",
            "tmdb": "4568681",
            "tvdb": "9829910",
            "title": "Adventures of a Normal Man (2)",
            "locations": ("S01E02.mkv",),
            "status": {"completed": True, "time": 0},
        },
        {
            "imdb": "tt20413328",
            "tmdb": "4497012",
            "tvdb": "9870382",
            "title": "My Interview with Superman",
            "locations": ("S01E03.mkv",),
            "status": {"completed": True, "time": 0},
        },
    ],
}


tv_shows_watched_list_2 = {
    frozenset(
        {
            ("locations", ("Doctor Who (2005) {tvdb-78804} {imdb-tt0436992}",)),
            ("imdb", "tt0436992"),
            ("tmdb", "57243"),
            ("title", "Doctor Who"),
            ("tvdb", "78804"),
            ("tvrage", "3332"),
        }
    ): [
        {
            "tvdb": "295294",
            "imdb": "tt0562992",
            "title": "Rose",
            "locations": ("S01E01.mkv",),
            "status": {"completed": True, "time": 0},
        },
        {
            "tvdb": "295295",
            "imdb": "tt0562997",
            "title": "The End of the World",
            "locations": ("S01E02.mkv",),
            "status": {"completed": False, "time": 300670},
        },
        {
            "tvdb": "295298",
            "imdb": "tt0563003",
            "title": "World War Three (2)",
            "locations": ("S01E05.mkv",),
            "status": {"completed": True, "time": 0},
        },
    ],
    frozenset(
        {
            ("title", "Monarch: Legacy of Monsters"),
            ("imdb", "tt17220216"),
            ("tvdb", "422598"),
            ("tmdb", "202411"),
            (
                "locations",
                ("Monarch - Legacy of Monsters {tvdb-422598} {imdb-tt17220216}",),
            ),
        }
    ): [
        {
            "tvdb": "9959300",
            "imdb": "tt20412166",
            "title": "Aftermath",
            "locations": ("S01E01.mkv",),
            "status": {"completed": True, "time": 0},
        },
        {
            "tvdb": "10009417",
            "imdb": "tt22866594",
            "title": "Departure",
            "locations": ("S01E02.mkv",),
            "status": {"completed": False, "time": 300741},
        },
        {
            "tvdb": "10009420",
            "imdb": "tt23787572",
            "title": "The Way Out",
            "locations": ("S01E05.mkv",),
            "status": {"completed": True, "time": 0},
        },
    ],
    frozenset(
        {
            ("tmdb", "125928"),
            ("imdb", "tt14681924"),
            ("tvdb", "403172"),
            (
                "locations",
                ("My Adventures with Superman {tvdb-403172} {imdb-tt14681924}",),
            ),
            ("title", "My Adventures with Superman"),
        }
    ): [
        {
            "tvdb": "8438181",
            "imdb": "tt15699926",
            "title": "Adventures of a Normal Man (1)",
            "locations": ("S01E01.mkv",),
            "status": {"completed": True, "time": 0},
        },
        {
            "tvdb": "9829910",
            "imdb": "tt20413322",
            "title": "Adventures of a Normal Man (2)",
            "locations": ("S01E02.mkv",),
            "status": {"completed": True, "time": 0},
        },
        {
            "tvdb": "9870382",
            "imdb": "tt20413328",
            "title": "My Interview with Superman",
            "locations": ("S01E03.mkv",),
            "status": {"completed": True, "time": 0},
        },
    ],
}

expected_tv_show_watched_list_1 = {
    frozenset(
        {
            ("locations", ("Doctor Who (2005) {tvdb-78804} {imdb-tt0436992}",)),
            ("imdb", "tt0436992"),
            ("tmdb", "57243"),
            ("tvdb", "78804"),
            ("title", "Doctor Who (2005)"),
        }
    ): [
        {
            "imdb": "tt0563001",
            "tmdb": "968589",
            "tvdb": "295296",
            "title": "The Unquiet Dead",
            "locations": ("S01E03.mkv",),
            "status": {"completed": True, "time": 0},
        },
        {
            "imdb": "tt0562985",
            "tmdb": "968590",
            "tvdb": "295297",
            "title": "Aliens of London (1)",
            "locations": ("S01E04.mkv",),
            "status": {"completed": False, "time": 240000},
        },
    ],
    frozenset(
        {
            ("title", "Monarch: Legacy of Monsters"),
            ("imdb", "tt17220216"),
            ("tvdb", "422598"),
            ("tmdb", "202411"),
            (
                "locations",
                ("Monarch - Legacy of Monsters {tvdb-422598} {imdb-tt17220216}",),
            ),
        }
    ): [
        {
            "imdb": "tt21255044",
            "tmdb": "4661246",
            "tvdb": "10009418",
            "title": "Secrets and Lies",
            "locations": ("S01E03.mkv",),
            "status": {"completed": True, "time": 0},
        },
        {
            "imdb": "tt21255050",
            "tmdb": "4712059",
            "tvdb": "10009419",
            "title": "Parallels and Interiors",
            "locations": ("S01E04.mkv",),
            "status": {"completed": False, "time": 240000},
        },
    ],
}

expected_tv_show_watched_list_2 = {
    frozenset(
        {
            ("locations", ("Doctor Who (2005) {tvdb-78804} {imdb-tt0436992}",)),
            ("imdb", "tt0436992"),
            ("tmdb", "57243"),
            ("title", "Doctor Who"),
            ("tvdb", "78804"),
            ("tvrage", "3332"),
        }
    ): [
        {
            "tvdb": "295294",
            "imdb": "tt0562992",
            "title": "Rose",
            "locations": ("S01E01.mkv",),
            "status": {"completed": True, "time": 0},
        },
        {
            "tvdb": "295295",
            "imdb": "tt0562997",
            "title": "The End of the World",
            "locations": ("S01E02.mkv",),
            "status": {"completed": False, "time": 300670},
        },
    ],
    frozenset(
        {
            ("title", "Monarch: Legacy of Monsters"),
            ("imdb", "tt17220216"),
            ("tvdb", "422598"),
            ("tmdb", "202411"),
            (
                "locations",
                ("Monarch - Legacy of Monsters {tvdb-422598} {imdb-tt17220216}",),
            ),
        }
    ): [
        {
            "tvdb": "9959300",
            "imdb": "tt20412166",
            "title": "Aftermath",
            "locations": ("S01E01.mkv",),
            "status": {"completed": True, "time": 0},
        },
        {
            "tvdb": "10009417",
            "imdb": "tt22866594",
            "title": "Departure",
            "locations": ("S01E02.mkv",),
            "status": {"completed": False, "time": 300741},
        },
    ],
}

movies_watched_list_1 = [
    {
        "imdb": "tt1254207",
        "tmdb": "10378",
        "tvdb": "12352",
        "title": "Big Buck Bunny",
        "locations": ("Big Buck Bunny.mkv",),
        "status": {"completed": True, "time": 0},
    },
    {
        "imdb": "tt16431870",
        "tmdb": "1029575",
        "tvdb": "351194",
        "title": "The Family Plan",
        "locations": ("The Family Plan (2023).mkv",),
        "status": {"completed": True, "time": 0},
    },
    {
        "imdb": "tt5537002",
        "tmdb": "466420",
        "tvdb": "135852",
        "title": "Killers of the Flower Moon",
        "locations": ("Killers of the Flower Moon (2023).mkv",),
        "status": {"completed": False, "time": 240000},
    },
]

movies_watched_list_2 = [
    {
        "imdb": "tt16431870",
        "tmdb": "1029575",
        "title": "The Family Plan",
        "locations": ("The Family Plan (2023).mkv",),
        "status": {"completed": True, "time": 0},
    },
    {
        "imdb": "tt4589218",
        "tmdb": "507089",
        "title": "Five Nights at Freddy's",
        "locations": ("Five Nights at Freddy's (2023).mkv",),
        "status": {"completed": True, "time": 0},
    },
    {
        "imdb": "tt10545296",
        "tmdb": "695721",
        "tmdbcollection": "131635",
        "title": "The Hunger Games: The Ballad of Songbirds & Snakes",
        "locations": ("The Hunger Games The Ballad of Songbirds & Snakes (2023).mkv",),
        "status": {"completed": False, "time": 301215},
    },
]


expected_movie_watched_list_1 = [
    {
        "imdb": "tt1254207",
        "tmdb": "10378",
        "tvdb": "12352",
        "title": "Big Buck Bunny",
        "locations": ("Big Buck Bunny.mkv",),
        "status": {"completed": True, "time": 0},
    },
    {
        "imdb": "tt5537002",
        "tmdb": "466420",
        "tvdb": "135852",
        "title": "Killers of the Flower Moon",
        "locations": ("Killers of the Flower Moon (2023).mkv",),
        "status": {"completed": False, "time": 240000},
    },
]

expected_movie_watched_list_2 = [
    {
        "imdb": "tt4589218",
        "tmdb": "507089",
        "title": "Five Nights at Freddy's",
        "locations": ("Five Nights at Freddy's (2023).mkv",),
        "status": {"completed": True, "time": 0},
    },
    {
        "imdb": "tt10545296",
        "tmdb": "695721",
        "tmdbcollection": "131635",
        "title": "The Hunger Games: The Ballad of Songbirds & Snakes",
        "locations": ("The Hunger Games The Ballad of Songbirds & Snakes (2023).mkv",),
        "status": {"completed": False, "time": 301215},
    },
]

# Test to see if objects get deleted all the way up to the root.
tv_shows_2_watched_list_1 = {
    frozenset(
        {
            ("tvdb", "75710"),
            ("title", "Criminal Minds"),
            ("imdb", "tt0452046"),
            ("locations", ("Criminal Minds",)),
            ("tmdb", "4057"),
        }
    ): [
        {
            "imdb": "tt0550489",
            "tmdb": "282843",
            "tvdb": "176357",
            "title": "Extreme Aggressor",
            "locations": ("Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",),
            "status": {"completed": True, "time": 0},
        },
    ]
}


def test_simple_cleanup_watched():
    user_watched_list_1 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_1,
            "Movies": movies_watched_list_1,
            "Other Shows": tv_shows_2_watched_list_1,
        },
    }
    user_watched_list_2 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_2,
            "Movies": movies_watched_list_2,
            "Other Shows": tv_shows_2_watched_list_1,
        }
    }

    expected_watched_list_1 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_1,
            "Movies": expected_movie_watched_list_1,
        }
    }

    expected_watched_list_2 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_2,
            "Movies": expected_movie_watched_list_2,
        }
    }

    return_watched_list_1 = cleanup_watched(user_watched_list_1, user_watched_list_2)
    return_watched_list_2 = cleanup_watched(user_watched_list_2, user_watched_list_1)

    assert return_watched_list_1 == expected_watched_list_1
    assert return_watched_list_2 == expected_watched_list_2


def test_mapping_cleanup_watched():
    user_watched_list_1 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_1,
            "Movies": movies_watched_list_1,
            "Other Shows": tv_shows_2_watched_list_1,
        },
    }
    user_watched_list_2 = {
        "user2": {
            "Shows": tv_shows_watched_list_2,
            "Movies": movies_watched_list_2,
            "Other Shows": tv_shows_2_watched_list_1,
        }
    }

    expected_watched_list_1 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_1,
            "Movies": expected_movie_watched_list_1,
        }
    }

    expected_watched_list_2 = {
        "user2": {
            "Shows": expected_tv_show_watched_list_2,
            "Movies": expected_movie_watched_list_2,
        }
    }

    user_mapping = {"user1": "user2"}
    library_mapping = {"TV Shows": "Shows"}

    return_watched_list_1 = cleanup_watched(
        user_watched_list_1,
        user_watched_list_2,
        user_mapping=user_mapping,
        library_mapping=library_mapping,
    )
    return_watched_list_2 = cleanup_watched(
        user_watched_list_2,
        user_watched_list_1,
        user_mapping=user_mapping,
        library_mapping=library_mapping,
    )

    assert return_watched_list_1 == expected_watched_list_1
    assert return_watched_list_2 == expected_watched_list_2
212 test/validate_ci_marklog.py Normal file
@@ -0,0 +1,212 @@
# Check the mark.log file that is generated by the CI to make sure it contains the expected values

import os
import argparse


def parse_args():
    parser = argparse.ArgumentParser(
        description="Check the mark.log file that is generated by the CI to make sure it contains the expected values"
    )
    parser.add_argument(
        "--guids", action="store_true", help="Check the mark.log file for guids"
    )
    parser.add_argument(
        "--locations", action="store_true", help="Check the mark.log file for locations"
    )
    parser.add_argument(
        "--write", action="store_true", help="Check the mark.log file for write-run"
    )
    parser.add_argument(
        "--plex", action="store_true", help="Check the mark.log file for Plex"
    )
    parser.add_argument(
        "--jellyfin", action="store_true", help="Check the mark.log file for Jellyfin"
    )
    parser.add_argument(
        "--emby", action="store_true", help="Check the mark.log file for Emby"
    )

    return parser.parse_args()


def read_marklog():
    marklog = os.path.join(os.getcwd(), "mark.log")
    with open(marklog, "r") as f:
        lines = f.readlines()
    return lines


def check_marklog(lines, expected_values):
    try:
        # Check to make sure the marklog contains all the expected values and nothing else
        found_values = []
        for line in lines:
            # Remove the newline character
            line = line.strip()
            if line not in expected_values:
                raise Exception("Line not found in marklog: " + line)

            found_values.append(line)

        # Check to make sure the marklog contains the same number of values as the expected values
        if len(found_values) != len(expected_values):
            raise Exception(
                "Marklog did not contain the same number of values as the expected values, found "
                + str(len(found_values))
                + " values, expected "
                + str(len(expected_values))
                + " values\n"
                + "\n".join(found_values)
            )

        # Check that the two lists contain the same values
        if sorted(found_values) != sorted(expected_values):
            raise Exception(
                "Marklog did not contain the same values as the expected values, found:\n"
                + "\n".join(sorted(found_values))
                + "\n\nExpected:\n"
                + "\n".join(sorted(expected_values))
            )

        return True
    except Exception as e:
        print(e)
        return False


def main():
    args = parse_args()
    expected_jellyfin = [
        "Plex/JellyPlex-CI/jellyplex_watched/Custom Movies/Movie Two (2021)",
        "Plex/JellyPlex-CI/jellyplex_watched/Custom TV Shows/Greatest Show Ever 3000/Episode 2",
        "Plex/JellyPlex-CI/jellyplex_watched/Movies/Five Nights at Freddy's",
        "Plex/JellyPlex-CI/jellyplex_watched/Movies/The Hunger Games: The Ballad of Songbirds & Snakes/301215",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Doctor Who (2005)/Rose",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Doctor Who (2005)/The End of the World/300670",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Aftermath",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Departure/300741",
        "Emby/Emby-Server/jellyplex_watched/Custom Movies/Movie Two",
        "Emby/Emby-Server/jellyplex_watched/Custom TV Shows/Greatest Show Ever (3000)/S01E02",
        "Emby/Emby-Server/jellyplex_watched/Movies/The Family Plan",
        "Emby/Emby-Server/jellyplex_watched/Movies/Five Nights at Freddy's",
        "Emby/Emby-Server/jellyplex_watched/Movies/The Hunger Games: The Ballad of Songbirds & Snakes/5",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/Rose",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/The End of the World/5",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Departure/5",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/The Way Out",
    ]
    expected_emby = [
        "Plex/JellyPlex-CI/jellyplex_watched/Custom Movies/Movie Three (2022)",
        "Plex/JellyPlex-CI/jellyplex_watched/Custom TV Shows/Greatest Show Ever 3000/Episode 3",
        "Plex/JellyPlex-CI/jellyplex_watched/Movies/Tears of Steel",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Aftermath",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Parallels and Interiors/240429",
        "Jellyfin/Jellyfin-Server/JellyUser/Custom Movies/Movie Three (2022)",
        "Jellyfin/Jellyfin-Server/JellyUser/Custom TV Shows/Greatest Show Ever (3000)/S01E03",
        "Jellyfin/Jellyfin-Server/JellyUser/Movies/Tears of Steel",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Parallels and Interiors/4",
    ]
    expected_plex = [
        "Jellyfin/Jellyfin-Server/JellyUser/Movies/Big Buck Bunny",
        "Jellyfin/Jellyfin-Server/JellyUser/Movies/Killers of the Flower Moon/4",
        "Jellyfin/Jellyfin-Server/JellyUser/Custom TV Shows/Greatest Show Ever (3000)/S01E01",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Doctor Who/The Unquiet Dead",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Doctor Who/Aliens of London (1)/4",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Secrets and Lies",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Parallels and Interiors/4",
        "Jellyfin/Jellyfin-Server/JellyUser/Custom Movies/Movie One (2020)",
        "Emby/Emby-Server/jellyplex_watched/Movies/Big Buck Bunny",
        "Emby/Emby-Server/jellyplex_watched/Movies/The Family Plan",
        "Emby/Emby-Server/jellyplex_watched/Movies/Killers of the Flower Moon/4",
        "Emby/Emby-Server/jellyplex_watched/Custom TV Shows/Greatest Show Ever (3000)/S01E01",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/The Unquiet Dead",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/Aliens of London (1)/4",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Secrets and Lies",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/The Way Out",
        "Emby/Emby-Server/jellyplex_watched/Custom Movies/Movie One",
    ]

    expected_locations = expected_emby + expected_plex + expected_jellyfin
    # Remove Custom Movies/TV Shows as they should not have guids
    expected_guids = [item for item in expected_locations if "Custom" not in item]

    expected_write = [
        "Plex/JellyPlex-CI/jellyplex_watched/Custom Movies/Movie Two (2021)",
        "Plex/JellyPlex-CI/jellyplex_watched/Custom TV Shows/Greatest Show Ever 3000/Episode 2",
        "Plex/JellyPlex-CI/jellyplex_watched/Movies/Five Nights at Freddy's",
        "Plex/JellyPlex-CI/jellyplex_watched/Movies/The Hunger Games: The Ballad of Songbirds & Snakes/301215",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Doctor Who (2005)/Rose",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Doctor Who (2005)/The End of the World/300670",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Aftermath",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Departure/300741",
        "Jellyfin/Jellyfin-Server/JellyUser/Movies/Big Buck Bunny",
        "Jellyfin/Jellyfin-Server/JellyUser/Movies/Killers of the Flower Moon/4",
        "Jellyfin/Jellyfin-Server/JellyUser/Custom TV Shows/Greatest Show Ever (3000)/S01E01",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Doctor Who/The Unquiet Dead",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Doctor Who/Aliens of London (1)/4",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Secrets and Lies",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Parallels and Interiors/4",
        "Jellyfin/Jellyfin-Server/JellyUser/Custom Movies/Movie One (2020)",
        "Plex/JellyPlex-CI/jellyplex_watched/Custom Movies/Movie Three (2022)",
        "Plex/JellyPlex-CI/jellyplex_watched/Custom TV Shows/Greatest Show Ever 3000/Episode 3",
        "Plex/JellyPlex-CI/jellyplex_watched/Movies/Tears of Steel",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Parallels and Interiors/240429",
        "Emby/Emby-Server/jellyplex_watched/Movies/Big Buck Bunny",
        "Emby/Emby-Server/jellyplex_watched/Movies/The Family Plan",
        "Emby/Emby-Server/jellyplex_watched/Movies/Five Nights at Freddy's",
        "Emby/Emby-Server/jellyplex_watched/Movies/The Hunger Games: The Ballad of Songbirds & Snakes/5",
        "Emby/Emby-Server/jellyplex_watched/Movies/Killers of the Flower Moon/4",
        "Emby/Emby-Server/jellyplex_watched/Custom TV Shows/Greatest Show Ever (3000)/S01E01",
        "Emby/Emby-Server/jellyplex_watched/Custom TV Shows/Greatest Show Ever (3000)/S01E02",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/Rose",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/The End of the World/5",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/The Unquiet Dead",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/Aliens of London (1)/4",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Departure/5",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Secrets and Lies",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/The Way Out",
        "Emby/Emby-Server/jellyplex_watched/Custom Movies/Movie One",
        "Emby/Emby-Server/jellyplex_watched/Custom Movies/Movie Two",
        "Jellyfin/Jellyfin-Server/JellyUser/Custom Movies/Movie Three (2022)",
        "Jellyfin/Jellyfin-Server/JellyUser/Custom TV Shows/Greatest Show Ever (3000)/S01E03",
        "Jellyfin/Jellyfin-Server/JellyUser/Movies/Tears of Steel",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Parallels and Interiors/4",
    ]

    # Expected values for the mark.log file. The dry-run lists differ slightly from the
    # write-run list because a write run copies items from one server to another, so they
    # are already present for the next server run.
    if args.guids:
        expected_values = expected_guids
    elif args.locations:
        expected_values = expected_locations
    elif args.write:
        expected_values = expected_write
    elif args.plex:
        expected_values = expected_plex
    elif args.jellyfin:
        expected_values = expected_jellyfin
    elif args.emby:
        expected_values = expected_emby
    else:
        print("No server specified")
        exit(1)

    lines = read_marklog()
    if not check_marklog(lines, expected_values):
        print("Failed to validate marklog")
        for line in lines:
            # Remove the newline character
            line = line.strip()

            print(line)

        exit(1)

    print("Successfully validated marklog")
    exit(0)


if __name__ == "__main__":
    main()
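For context, a minimal sketch of how this validator might be driven from CI, assuming the job runs from the repository root after a sync run has written mark.log there; the --write flag comes from parse_args() above, while the surrounding step layout is an assumption.

# Hypothetical CI step (assumed layout): run the validator after a sync run and
# fail the job on a non-zero exit code (the script calls exit(1) on mismatch).
import subprocess

subprocess.run(
    ["python", "test/validate_ci_marklog.py", "--write"],
    check=True,
)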