Compare commits
449 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | a1adf97bfb |  |
|  | 8dea290dd1 |  |
|  | 90358f0176 |  |
|  | 96239d6704 |  |
|  | e8e6d6469f |  |
|  | 0dc338df6f |  |
|  | 72df1cd0ad |  |
|  | 8af7fe00f9 |  |
|  | 6eefedfc40 |  |
|  | 2914dbb81c |  |
|  | 52c780d8a7 |  |
|  | 0276e7c8eb |  |
|  | bf50defcb5 |  |
|  | 71d753878e |  |
|  | 21c530d956 |  |
|  | 142c9df6e9 |  |
|  | 629f50ecdc |  |
|  | 3e2450b5fd |  |
|  | 0de5e86837 |  |
|  | 33a719f693 |  |
|  | 9ff985a848 |  |
|  | 5501e21aa8 |  |
|  | 2208d91d07 |  |
|  | 75f7f576ac |  |
|  | 24f56769f9 |  |
|  | 29e4f224dc |  |
|  | bdb58918e7 |  |
|  | c3be980eea |  |
|  | c1a26dd73b |  |
|  | e5d5f11f33 |  |
|  | 616ca92d5e |  |
|  | b2b214c987 |  |
|  | 07542b498e |  |
|  | 9e53c0f8e2 |  |
|  | 98266de678 |  |
|  | 9d4f3dd432 |  |
|  | cc9b84fefa |  |
|  | c76bb3b355 |  |
|  | 544649effd |  |
|  | 46b60bb866 |  |
|  | 5670c3ad97 |  |
|  | 7e0f4babda |  |
|  | d5c36c61ec |  |
|  | 69cd73d965 |  |
|  | 229ab59b44 |  |
|  | 3e474a4593 |  |
|  | 69958a257b |  |
|  | 64c1823e5b |  |
|  | 446f6df470 |  |
|  | 91ea5d76f6 |  |
|  | dc26b9a7b1 |  |
|  | d98b7c3e09 |  |
|  | 93d9471333 |  |
|  | e6fa8ae745 |  |
|  | 5b644a54a2 |  |
|  | 5a17c5f7a1 |  |
|  | 61e3dddd6b |  |
|  | aaaa7eba70 |  |
|  | 991355716d |  |
|  | 54bd6e836f |  |
|  | 57c41f41bc |  |
|  | ea85a31d9c |  |
|  | 80d5c9e54c |  |
|  | 5828701944 |  |
|  | 81ba9bd7f9 |  |
|  | d15759570e |  |
|  | 1b88ecf2eb |  |
|  | c62809c615 |  |
|  | 899a6b05a4 |  |
|  | fcd6103e17 |  |
|  | ac5be474f8 |  |
|  | d15f29b772 |  |
|  | c9944866f8 |  |
|  | 846e18fffe |  |
|  | eb09de2bdf |  |
|  | c0e207924c |  |
|  | e48533dfbd |  |
|  | 8503b087b2 |  |
|  | 305fea8f9a |  |
|  | 588c23ce41 |  |
|  | 8f4a2e2690 |  |
|  | 38e65f5a17 |  |
|  | de32d59aa1 |  |
|  | 998f2b1209 |  |
|  | 0b02f531c1 |  |
|  | e589935b37 |  |
|  | 031d43e980 |  |
|  | ba6cad13f6 |  |
|  | f3801a0bd2 |  |
|  | 196a49fca4 |  |
|  | 4d0f1d303f |  |
|  | ce5b810a5b |  |
|  | a1e1ccde42 |  |
|  | bf633c75d1 |  |
|  | 46fa5e7c9a |  |
|  | 170757aca1 |  |
|  | 9786e9e27d |  |
|  | 8b691b7bfa |  |
|  | e1c65fc082 |  |
|  | 58749a4fb8 |  |
|  | 51ec69f651 |  |
|  | 4771f736b0 |  |
|  | 8d7436579e |  |
|  | 43e1df98b1 |  |
|  | 3017030f52 |  |
|  | 348a0b8226 |  |
|  | 4e60c08120 |  |
|  | 10b58379cd |  |
|  | fa9201b20f |  |
|  | 86f72997b4 |  |
|  | 62d0319aad |  |
|  | a096a09eb7 |  |
|  | 7294241fed |  |
|  | a5995d3999 |  |
|  | 30f31b2f3f |  |
|  | bc09c873e9 |  |
|  | 8428be9dda |  |
|  | 6a45ad18f9 |  |
|  | 023b638729 |  |
|  | 7e13c14636 |  |
|  | 0c218fa9dd |  |
|  | b3b0ccac73 |  |
|  | fa0134551f |  |
|  | 34d62c9021 |  |
|  | 920bbbb3be |  |
|  | 762e5f10da |  |
|  | 27797cb361 |  |
|  | 066f9d1f66 |  |
|  | acf7c2cdf2 |  |
|  | 469857a31a |  |
|  | 405e5decf2 |  |
|  | da9abf8a24 |  |
|  | 128c6a1c76 |  |
|  | 99f32c10ef |  |
|  | 44e42f99db |  |
|  | b1639eab0f |  |
|  | 679d3535b1 |  |
|  | a795d4bba5 |  |
|  | 0a025cf5fa |  |
|  | 6a1ceb4db3 |  |
|  | 99c339c405 |  |
|  | bd75d865ba |  |
|  | d30e03b702 |  |
|  | 3b749faefb |  |
|  | 74f29d44b3 |  |
|  | a397ceb54e |  |
|  | 502b3616df |  |
|  | 1a7178e32d |  |
|  | 7119956ec7 |  |
|  | 24035e217e |  |
|  | 21ffce674f |  |
|  | 4185f5fc94 |  |
|  | 3fdcc99304 |  |
|  | 0fa2a698ac |  |
|  | 2b871c58ed |  |
|  | 5078243938 |  |
|  | b67e6d7257 |  |
|  | 632dfbcadb |  |
|  | 1f7da2f609 |  |
|  | b3175305bd |  |
|  | 5b1933cb08 |  |
|  | ae71ca0940 |  |
|  | 9b38729b95 |  |
|  | 402c286742 |  |
|  | dcd4ac1d36 |  |
|  | e6fbf746d8 |  |
|  | 803d248cb8 |  |
|  | 713be6970c |  |
|  | 62509f16db |  |
|  | 84899aef50 |  |
|  | 86b30e1887 |  |
|  | 033ef76cfe |  |
|  | 815596379c |  |
|  | bc5e8bc65d |  |
|  | b32de7259b |  |
|  | 29cb0cebd5 |  |
|  | 6744ebcb5b |  |
|  | c6b026a82d |  |
|  | cc706938ce |  |
|  | 84b98db36b |  |
|  | 01ad15e2bd |  |
|  | 54adf0e56f |  |
|  | 025e40b098 |  |
|  | 4534854001 |  |
|  | 362d54b471 |  |
|  | fa533ff65e |  |
|  | 96fe367562 |  |
|  | 9566ffa384 |  |
|  | f5835e1e72 |  |
|  | fe65716706 |  |
|  | 873735900f |  |
|  | 28c166146e |  |
|  | c6affc3108 |  |
|  | 59b49fd0df |  |
|  | 6ec003f899 |  |
|  | 95f2a9ad30 |  |
|  | 7317e8533d |  |
|  | f80c20d70c |  |
|  | 01fc13c3e0 |  |
|  | 1edfecae42 |  |
|  | 9dab9a4632 |  |
|  | 98a824bfdc |  |
|  | 8fa9351ef1 |  |
|  | 64b2197844 |  |
|  | 26f1f80be7 |  |
|  | 2e4c2a6817 |  |
|  | 9498335e22 |  |
|  | 26f40110d0 |  |
|  | 9375d482b0 |  |
|  | de9180a124 |  |
|  | ba480d2cb7 |  |
|  | 5014748ee1 |  |
|  | 4e25ae5539 |  |
|  | a2b802a5de |  |
|  | 9739b27718 |  |
|  | bdf6476689 |  |
|  | b8b627be1a |  |
|  | 03cad668aa |  |
|  | 2e0ec9aa38 |  |
|  | 4b02aae889 |  |
|  | c91ba0b1b3 |  |
|  | 6b7f8b04e6 |  |
|  | 5472baab51 |  |
|  | d5b6859bf8 |  |
|  | 8e23847c79 |  |
|  | 0c1579bae7 |  |
|  | 3dc50fff95 |  |
|  | b8273f50c2 |  |
|  | dbea28e9c6 |  |
|  | a1b11ab039 |  |
|  | 1841b0dea6 |  |
|  | b311bf2770 |  |
|  | df13cef760 |  |
|  | 76ac264b25 |  |
|  | 93bc94add5 |  |
|  | 79325b8c61 |  |
|  | 58c1eb7004 |  |
|  | 466f292feb |  |
|  | 4de25a0d4a |  |
|  | 43d6bc0d82 |  |
|  | b53d7c9ecc |  |
|  | 116d50a75a |  |
|  | e1fb365096 |  |
|  | 03617dacfc |  |
|  | e6b33f1bc9 |  |
|  | d9e6a554f6 |  |
|  | 7ef37fe848 |  |
|  | dd64617cbd |  |
|  | a227c01a7f |  |
|  | da53609385 |  |
|  | e94a8fb2c3 |  |
|  | d87542ab78 |  |
|  | 945ffb2fb3 |  |
|  | da808ba25e |  |
|  | e4b4c7ba39 |  |
|  | 43ead4bb0f |  |
|  | c4a2f8af39 |  |
|  | fd281a50b6 |  |
|  | f8ef4fe6c9 |  |
|  | faef0ae246 |  |
|  | 117932e272 |  |
|  | 4297708d3e |  |
|  | 2d00d8cb3e |  |
|  | 0190788658 |  |
|  | b46d4a7166 |  |
|  | 994d529f59 |  |
|  | 7f347ae186 |  |
|  | 4a4c9f9ccf |  |
|  | cd4ce186ca |  |
|  | ca5403f97b |  |
|  | 7bb76f62a5 |  |
|  | dcdbe44648 |  |
|  | f91005f0ba |  |
|  | 5baea978ab |  |
|  | 9cc1f96eea |  |
|  | 2a65c4b5ca |  |
|  | e1ef6615cc |  |
|  | d607c9c821 |  |
|  | f6b2186824 |  |
|  | a3fc53059c |  |
|  | 6afe123947 |  |
|  | 7e9c6bb338 |  |
|  | 89a2768fc9 |  |
|  | 9ff3bdf302 |  |
|  | 2c48e89435 |  |
|  | 6ccb68aeb3 |  |
|  | 032243de0a |  |
|  | 5b1b9ec222 |  |
|  | 375c6b23a5 |  |
|  | b378dff0dc |  |
|  | 23f2d287d6 |  |
|  | 3cd73e54a1 |  |
|  | bf5d875079 |  |
|  | aef884523b |  |
|  | 2a59f38faf |  |
|  | 3a0e60c772 |  |
|  | fb657d41db |  |
|  | ac7f389563 |  |
|  | 237e82eceb |  |
|  | 8fab4304a4 |  |
|  | 971c9e9147 |  |
|  | cacbca5a07 |  |
|  | e4dbd8adfb |  |
|  | 19f77c89e7 |  |
|  | ce1b922f71 |  |
|  | 81e967864d |  |
|  | 29f55104bc |  |
|  | ff2e2deb20 |  |
|  | 3fa55cb41b |  |
|  | aa5d97a0d5 |  |
|  | 89c4f15ae8 |  |
|  | 1351bfc1cf |  |
|  | 32cc76f043 |  |
|  | 968cb2091d |  |
|  | 8986c1037b |  |
|  | 87b4a950f1 |  |
|  | 9f61c7338d |  |
|  | ffc81dad69 |  |
|  | 7eba46b5cb |  |
|  | aa177666a5 |  |
|  | 7de7b42fd2 |  |
|  | 03d1fd8019 |  |
|  | 485ec5fe2d |  |
|  | 59bfbd9811 |  |
|  | 1e485b37f8 |  |
|  | 4adf94f24b |  |
|  | 1a0fab36d3 |  |
|  | a1ef3b5a8d |  |
|  | 0c47ee7119 |  |
|  | e51cf6e482 |  |
|  | 24d5de813d |  |
|  | 9921b2a355 |  |
|  | faa378c75e |  |
|  | 26199100dc |  |
|  | bee854f059 |  |
|  | 73c1ebf3ed |  |
|  | 397dd17429 |  |
|  | 73d18dad92 |  |
|  | 94d63a3fdb |  |
|  | 120d89e8be |  |
|  | eb5534c61c |  |
|  | 99d217e8f1 |  |
|  | f7e3f8ae2a |  |
|  | 2cebd2d73d |  |
|  | 18df322c41 |  |
|  | fc80f50560 |  |
|  | 4870ff9e7a |  |
|  | 58337bd38c |  |
|  | e6d1e0933a |  |
|  | 68e3f25ba4 |  |
|  | c981426db6 |  |
|  | 916b16b12c |  |
|  | a178d230de |  |
|  | fffb04728a |  |
|  | 658361383a |  |
|  | 3330026de6 |  |
|  | 25fe426720 |  |
|  | 8d53b5b8c0 |  |
|  | 0774735f0f |  |
|  | a5540b94d5 |  |
|  | c69d59858d |  |
|  | 962b1149ad |  |
|  | a8edee0354 |  |
|  | 3627dde64d |  |
|  | 80ec0e42c2 |  |
|  | fd64088bde |  |
|  | 7832e41a3b |  |
|  | cadd65d69b |  |
|  | 9f004797fc |  |
|  | 9041fee7ad |  |
|  | 9af6c9057c |  |
|  | 757ce91138 |  |
|  | 98f96ed5c7 |  |
|  | 3e15120e2a |  |
|  | 5824e6c0cc |  |
|  | 7087d75efb |  |
|  | b2a06b8fd3 |  |
|  | 1ee055faf5 |  |
|  | 404089dfca |  |
|  | ed24948dee |  |
|  | 1f16fcb8eb |  |
|  | 03de3affd7 |  |
|  | 2bad887659 |  |
|  | 796be47a63 |  |
|  | dc1fe11590 |  |
|  | 13b4ff3215 |  |
|  | dca54cf4fb |  |
|  | a4365e59f3 |  |
|  | b960bccb86 |  |
|  | 218037200c |  |
|  | 4ac670e837 |  |
|  | 96eff65c3e |  |
|  | 45471607c8 |  |
|  | 14885744b1 |  |
|  | d1fd61f1d1 |  |
|  | 6c1ee4a7dc |  |
|  | 9a8e799e68 |  |
|  | ffec4e2f28 |  |
|  | 00102891a5 |  |
|  | aa76b83428 |  |
|  | a644189ea5 |  |
|  | c5d987a8c9 |  |
|  | bdd68ad68d |  |
|  | 2d86bca781 |  |
|  | 1b01ff6ec2 |  |
|  | f08ec43507 |  |
|  | 7f9424260a |  |
|  | 5f21943353 |  |
|  | a5a795f43c |  |
|  | fcb6d7625f |  |
|  | fd2179998f |  |
|  | 654e7f20e1 |  |
|  | 1eb92cf7c1 |  |
|  | 111e284cc8 |  |
|  | 1a4e3f4ec4 |  |
|  | 4066228e57 |  |
|  | 59c6d278e3 |  |
|  | 39b33f3d43 |  |
|  | e8faf52b2b |  |
|  | 370e9bac63 |  |
|  | d0746cec5a |  |
|  | 251937431b |  |
|  | 50faf061af |  |
|  | 9ffbc49ad3 |  |
|  | 644dc8e3af |  |
|  | 47bc4e94dc |  |
|  | f17d39fe17 |  |
|  | 966dcacf8d |  |
|  | 9afc00443c |  |
|  | 3ec177ea64 |  |
|  | b360c9fd0b |  |
|  | 1ed791b1ed |  |
|  | f19b1a3063 |  |
|  | 190a72bd3c |  |
|  | c848106ce7 |  |
|  | dd319271bd |  |
|  | 16879cc728 |  |
|  | 942ec3533f |  |
|  | 9f6edfc91a |  |
|  | 827ace2e97 |  |
|  | f6b57a1b4d |  |
|  | 88a7526721 |  |
|  | 1efb4d8543 |  |
|  | 7571e9a343 |  |
|  | 7640e9ee03 |  |
|  | 50ed3d6400 |  |
|  | c9a373851f |  |
|  | a3f3db8f4e |  |
|  | de619de923 |  |
@@ -1 +1,13 @@
.env
.venv
.dockerignore
.env
.env.sample
.git
.github
.gitignore
.idea
.vscode

Dockerfile*
README.md
test
@@ -0,0 +1,117 @@
# Global Settings

## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
DRYRUN = "False"

## Debugging level, "info" is default, "debug" is more verbose
#DEBUG_LEVEL = "DEBUG"
DEBUG_LEVEL = "INFO"

## If set to true then the script will only run once and then exit
RUN_ONLY_ONCE = "True"

## How often to run the script in seconds
SLEEP_DURATION = "60"

## Log file where all output will be written to
LOG_FILE = "/mnt/log.log"

## Mark file where all shows/movies that have been marked as played will be written to
MARK_FILE = "/mnt/mark.log"

## Timeout for requests for jellyfin
REQUEST_TIMEOUT = 300

## Max threads for processing
MAX_THREADS = 1

## Generate guids/locations
## These are slow processes, so this is a way to speed things up
## If media servers are using the same files then you can enable only generate locations
## If media servers are using different files then you can enable only generate guids
## Default is to generate both
GENERATE_GUIDS = "True"
GENERATE_LOCATIONS = "True"

## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
# jellyfin: plex,plex
#USER_MAPPING = { "belandbroc": "debila,belan49", "debila,belan49": "belandbroc", "debila": "belandbroc", "belan49": "belandbroc" }
USER_MAPPING = { "belandbroc":"debila", "debila":"belandbroc", "debila":"belandbroc" }

## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options
LIBRARY_MAPPING = { "TV Shows": "Scratch TV Shows", "Scratch TV Shows": "TV Shows" }

## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
## Comma separated for multiple options
#BLACKLIST_LIBRARY = ""
WHITELIST_LIBRARY = "TV Shows,Scratch TV Shows,Movies"
#BLACKLIST_LIBRARY_TYPE = ""
#WHITELIST_LIBRARY_TYPE = ""
#BLACKLIST_USERS = ""
WHITELIST_USERS = "belandbroc,debila"


# Plex

## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
PLEX_BASEURL = "http://192.168.0.86:32400"

## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
## Comma separated list for multiple servers
# PLEX_TOKEN = "vPGyuy6zWVCz6ZFyy8x1"
# # debila=debilapointe@gmail
PLEX_TOKEN = "S7gbVzAzH4ypN-4K7ta5"
# me

## If not using plex token then use username and password of the server admin along with the servername
## Comma separated for multiple options
#PLEX_USERNAME = "squeaky2x3@gmail.com"
#PLEX_PASSWORD = "qoDuGNsGsWRurOd5QFdRy2@"
#PLEX_SERVERNAME = "Scratch"

## Skip hostname validation for ssl certificates.
## Set to True if running into ssl certificate errors
SSL_BYPASS = "True"


# Jellyfin

## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
JELLYFIN_BASEURL = "https://jellyfin.home.blapointe.com"

## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
## Comma separated list for multiple servers
JELLYFIN_TOKEN = "1dc766ce6ca44c53b773263a06889b96"


# # Emby
#
# ## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
# ## Comma seperated list for multiple servers
# EMBY_BASEURL = "http://localhost:8097"
#
# ## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
# ## Comma seperated list for multiple servers
# EMBY_TOKEN = "SuperSecretToken"


# Syncing Options

## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
SYNC_FROM_PLEX_TO_PLEX = "False"
#SYNC_FROM_PLEX_TO_EMBY = "False"

SYNC_FROM_JELLYFIN_TO_PLEX = "False"
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "False"
#SYNC_FROM_JELLYFIN_TO_EMBY = "False"

#SYNC_FROM_EMBY_TO_PLEX = "False"
#SYNC_FROM_EMBY_TO_JELLYFIN = "False"
#SYNC_FROM_EMBY_TO_EMBY = "False"
.env.sample (123 changes)

@@ -1,42 +1,111 @@
# Global Settings

## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
DRYRUN = "True"
## Additional logging information
DEBUG = "True"

## Debugging level, "info" is default, "debug" is more verbose
DEBUG_LEVEL = "info"
DEBUG_LEVEL = "INFO"

## If set to true then the script will only run once and then exit
RUN_ONLY_ONCE = "False"

## How often to run the script in seconds
SLEEP_DURATION = "3600"
## Log file where all output will be written to
LOGFILE = "log.log"
## Map usernames between plex and jellyfin in the event that they are different, order does not matter
#USER_MAPPING = { "testuser2": "testuser3" }
## Map libraries between plex and jellyfin in the even that they are different, order does not matter
#LIBRARY_MAPPING = { "Shows": "TV Shows" }

## Log file where all output will be written to
LOG_FILE = "log.log"

## Mark file where all shows/movies that have been marked as played will be written to
MARK_FILE = "mark.log"

## Timeout for requests for jellyfin
REQUEST_TIMEOUT = 300

## Max threads for processing
MAX_THREADS = 1

## Generate guids/locations
## These are slow processes, so this is a way to speed things up
## If media servers are using the same files then you can enable only generate locations
## If media servers are using different files then you can enable only generate guids
## Default is to generate both
GENERATE_GUIDS = "True"
GENERATE_LOCATIONS = "True"

## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
USER_MAPPING = { "Username": "User", "Second User": "User Dos" }

## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options
LIBRARY_MAPPING = { "Shows": "TV Shows", "Movie": "Movies" }

## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
## Comma separated for multiple options
#BLACKLIST_LIBRARY = ""
#WHITELIST_LIBRARY = ""
#BLACKLIST_LIBRARY_TYPE = ""
#WHITELIST_LIBRARY_TYPE = ""
#BLACKLIST_USERS = ""
#WHITELIST_USERS = ""


# Plex

## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma seperated list for multiple servers
PLEX_BASEURL = "http://localhost:32400"
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
PLEX_TOKEN = "SuperSecretToken"
## If not using plex token then use username and password of the server admin along with the servername
#PLEX_USERNAME = ""
#PLEX_PASSWORD = ""
#PLEX_SERVERNAME = "Plex Server"
## Comma separated list for multiple servers
PLEX_BASEURL = "http://localhost:32400, https://nas:32400"

## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
## Comma separated list for multiple servers
PLEX_TOKEN = "SuperSecretToken, SuperSecretToken2"

## If not using plex token then use username and password of the server admin along with the servername
## Comma separated for multiple options
#PLEX_USERNAME = "PlexUser, PlexUser2"
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"

## Skip hostname validation for ssl certificates.
## Set to True if running into ssl certificate errors
SSL_BYPASS = "False"


# Jellyfin

## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma seperated list for multiple servers
JELLYFIN_BASEURL = "http://localhost:8096"
## Comma separated list for multiple servers
JELLYFIN_BASEURL = "http://localhost:8096, http://nas:8096"

## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
JELLYFIN_TOKEN = "SuperSecretToken"
## Comma separated list for multiple servers
JELLYFIN_TOKEN = "SuperSecretToken, SuperSecretToken2"


## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
#BLACKLIST_LIBRARY = ""
#WHITELIST_LIBRARY = ""
#BLACKLIST_LIBRARY_TYPE = ""
#WHITELIST_LIBRARY_TYPE = ""
#BLACKLIST_USERS = ""
WHITELIST_USERS = "testuser1,testuser2"
# Emby

## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma seperated list for multiple servers
EMBY_BASEURL = "http://localhost:8097"

## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
## Comma seperated list for multiple servers
EMBY_TOKEN = "SuperSecretToken"


# Syncing Options

## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
SYNC_FROM_PLEX_TO_PLEX = "True"
SYNC_FROM_PLEX_TO_EMBY = "True"

SYNC_FROM_JELLYFIN_TO_PLEX = "True"
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
SYNC_FROM_JELLYFIN_TO_EMBY = "True"

SYNC_FROM_EMBY_TO_PLEX = "True"
SYNC_FROM_EMBY_TO_JELLYFIN = "True"
SYNC_FROM_EMBY_TO_EMBY = "True"
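The sample above documents two value formats that recur throughout the configuration: comma-separated lists for multi-server options and JSON-style objects for the mapping options. The following is a minimal illustrative sketch of how such values could be parsed; the helper names are hypothetical and the project's actual parsing may differ.

```python
import json
import os

# Hypothetical helpers illustrating the value formats documented in
# .env.sample above; not the project's real implementation.

def env_list(name: str) -> list[str]:
    # "SuperSecretToken, SuperSecretToken2" -> ["SuperSecretToken", "SuperSecretToken2"]
    raw = os.getenv(name, "")
    return [item.strip() for item in raw.split(",") if item.strip()]

def env_mapping(name: str) -> dict[str, str]:
    # '{ "Username": "User", "Second User": "User Dos" }' is JSON-compatible
    raw = os.getenv(name)
    return json.loads(raw) if raw else {}

plex_baseurls = env_list("PLEX_BASEURL")
plex_tokens = env_list("PLEX_TOKEN")
user_mapping = env_mapping("USER_MAPPING")
```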
@@ -0,0 +1,15 @@
# These are supported funding model platforms

github: [Luigi311]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
polar: # Replace with a single Polar username
buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
thanks_dev: # Replace with a single thanks.dev username
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
@@ -0,0 +1,33 @@
---
name: Bug report
about: Create a report to help us improve
title: "[BUG]"
labels: ''
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Logs**
If applicable, add logs to help explain your problem ideally with DEBUG set to true, be sure to remove sensitive information

**Type:**
- [ ] Docker Compose
- [ ] Docker
- [ ] Unraid
- [ ] Native

**Additional context**
Add any other context about the problem here.
@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: "[Feature Request]"
labels: ''
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
@@ -1,86 +1,203 @@
name: CI
on:
  push:
    paths-ignore:
      - .gitignore
      - "*.md"
  pull_request:
    paths-ignore:
      - .gitignore
      - "*.md"

jobs:
  pytest:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: "Install dependencies"
        run: pip install -r requirements.txt && pip install -r test/requirements.txt

      - name: "Run tests"
        run: pytest -vvv

  docker:
    runs-on: ubuntu-latest
    needs: pytest
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Docker meta
        id: docker_meta
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
        if: "${{ env.DOCKER_USERNAME != '' }}"
        uses: docker/metadata-action@v4
        with:
          images: ${{ secrets.DOCKER_USERNAME }}/jellyplex-watched # list of Docker images to use as base name for tags
          tags: |
            type=raw,value=latest,enable={{is_default_branch}}
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=sha

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: Login to DockerHub
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_TOKEN }}

      - name: Build
        id: build
        if: "${{ steps.docker_meta.outcome == 'skipped' }}"
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: false
          tags: jellyplex-watched:action

      - name: Build Push
        id: build_push
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ steps.docker_meta.outputs.tags }}
          labels: ${{ steps.docker_meta.outputs.labels }}

      # Echo digest so users can validate their image
      - name: Image digest
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        run: echo "${{ steps.build_push.outputs.digest }}"
name: CI
on:
  workflow_dispatch:
  push:
    paths-ignore:
      - .gitignore
      - "*.md"
  pull_request:
    paths-ignore:
      - .gitignore
      - "*.md"

env:
  PYTHON_VERSION: '3.13'

jobs:
  pytest:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install uv
        uses: astral-sh/setup-uv@v6

      - name: "Set up Python"
        uses: actions/setup-python@v5
        with:
          python-version-file: ".python-version"

      - name: "Install dependencies"
        run: uv sync --frozen

      - name: "Run tests"
        run: uv run pytest -vvv

  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Install uv
        uses: astral-sh/setup-uv@v6

      - name: "Set up Python"
        uses: actions/setup-python@v5
        with:
          python-version-file: ".python-version"

      - name: "Install dependencies"
        run: |
          uv sync --frozen
          sudo apt update && sudo apt install -y docker-compose

      - name: "Checkout JellyPlex-Watched-CI"
        uses: actions/checkout@v4
        with:
          repository: luigi311/JellyPlex-Watched-CI
          path: JellyPlex-Watched-CI

      - name: "Start containers"
        run: |
          JellyPlex-Watched-CI/start_containers.sh

          # Wait for containers to start
          sleep 10

          for FOLDER in $(find "JellyPlex-Watched-CI" -type f -name "docker-compose.yml" -exec dirname {} \;); do
            docker compose -f "${FOLDER}/docker-compose.yml" logs
          done

      - name: "Test Plex"
        run: |
          ENV_FILE="test/ci_plex.env" uv run main.py
          uv run test/validate_ci_marklog.py --plex

          rm mark.log

      - name: "Test Jellyfin"
        run: |
          ENV_FILE="test/ci_jellyfin.env" uv run main.py
          uv run test/validate_ci_marklog.py --jellyfin

          rm mark.log

      - name: "Test Emby"
        run: |
          ENV_FILE="test/ci_emby.env" uv run main.py
          uv run test/validate_ci_marklog.py --emby

          rm mark.log

      - name: "Test Guids"
        run: |
          ENV_FILE="test/ci_guids.env" uv run main.py
          uv run test/validate_ci_marklog.py --guids

          rm mark.log

      - name: "Test Locations"
        run: |
          ENV_FILE="test/ci_locations.env" uv run main.py
          uv run test/validate_ci_marklog.py --locations

          rm mark.log

      - name: "Test writing to the servers"
        run: |
          # Test writing to the servers
          ENV_FILE="test/ci_write.env" uv run main.py

          # Test again to test if it can handle existing data
          ENV_FILE="test/ci_write.env" uv run main.py

          uv run test/validate_ci_marklog.py --write

          rm mark.log

  docker:
    runs-on: ubuntu-latest
    needs:
      - pytest
      - test
    env:
      DEFAULT_VARIANT: alpine
    strategy:
      fail-fast: false
      matrix:
        include:
          - dockerfile: Dockerfile.alpine
            variant: alpine
          - dockerfile: Dockerfile.slim
            variant: slim
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Docker meta
        id: docker_meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ${{ secrets.DOCKER_USERNAME }}/jellyplex-watched,enable=${{ secrets.DOCKER_USERNAME != '' }}
            # Do not push to ghcr.io on PRs due to permission issues, only push if the owner is luigi311 so it doesnt fail on forks
            ghcr.io/${{ github.repository }},enable=${{ github.event_name != 'pull_request' && github.repository_owner == 'luigi311'}}
          flavor: latest=false
          tags: |
            type=raw,value=latest,enable=${{ matrix.variant == env.DEFAULT_VARIANT && startsWith(github.ref, 'refs/tags/') }}
            type=raw,value=latest,suffix=-${{ matrix.variant }},enable=${{ startsWith(github.ref, 'refs/tags/') }}

            type=ref,event=branch,suffix=-${{ matrix.variant }}
            type=ref,event=branch,enable=${{ matrix.variant == env.DEFAULT_VARIANT }}

            type=ref,event=pr,suffix=-${{ matrix.variant }}
            type=ref,event=pr,enable=${{ matrix.variant == env.DEFAULT_VARIANT }}

            type=semver,pattern={{ version }},suffix=-${{ matrix.variant }}
            type=semver,pattern={{ version }},enable=${{ matrix.variant == env.DEFAULT_VARIANT }}

            type=semver,pattern={{ major }}.{{ minor }},suffix=-${{ matrix.variant }}
            type=semver,pattern={{ major }}.{{ minor }},enable=${{ matrix.variant == env.DEFAULT_VARIANT }}

            type=sha,suffix=-${{ matrix.variant }}
            type=sha,enable=${{ matrix.variant == env.DEFAULT_VARIANT }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to DockerHub
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
        if: "${{ env.DOCKER_USERNAME != '' }}"
        id: docker_login
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_TOKEN }}

      - name: Login to GitHub Container Registry
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build Push
        id: build_push
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ${{ matrix.dockerfile }}
          platforms: linux/amd64,linux/arm64
          push: ${{ steps.docker_login.outcome == 'success' }}
          tags: ${{ steps.docker_meta.outputs.tags }}
          labels: ${{ steps.docker_meta.outputs.labels }}

      # Echo digest so users can validate their image
      - name: Image digest
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        run: echo "${{ steps.build_push.outputs.digest }}"
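The tag list in the new docker job is dense: every tag is emitted once with a `-<variant>` suffix and once unsuffixed, with the unsuffixed copy enabled only for the default variant. A small illustrative sketch of the intended outcome for branch builds (this mirrors, but is not generated from, the metadata-action rules above):

```python
# Illustrative only: the effective tag set for a branch build, per variant,
# under the suffix/enable rules in the workflow above.
DEFAULT_VARIANT = "alpine"  # matches env.DEFAULT_VARIANT in the docker job

def branch_tags(branch: str, variant: str) -> list[str]:
    tags = [f"{branch}-{variant}"]  # type=ref,event=branch,suffix=-<variant>
    if variant == DEFAULT_VARIANT:
        tags.append(branch)  # unsuffixed tag only for the default variant
    return tags

assert branch_tags("dev", "alpine") == ["dev-alpine", "dev"]
assert branch_tags("dev", "slim") == ["dev-slim"]
```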
@@ -0,0 +1,41 @@
name: "CodeQL"

on:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]
  schedule:
    - cron: "23 20 * * 6"

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ python ]

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}
          queries: +security-and-quality

      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
        with:
          category: "/language:${{ matrix.language }}"
@@ -1,132 +1,129 @@
.env
*.prof

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/
**.env*
*.prof

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/
@@ -0,0 +1 @@
3.13
@@ -1,16 +1,27 @@
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Python: Main",
            "type": "python",
            "request": "launch",
            "program": "main.py",
            "console": "integratedTerminal",
            "justMyCode": true
        }
    ]
}
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Python: Main",
            "type": "debugpy",
            "request": "launch",
            "program": "main.py",
            "console": "integratedTerminal",
            "justMyCode": true
        },
        {
            "name": "Pytest",
            "type": "debugpy",
            "request": "launch",
            "module": "pytest",
            "args": [
                "-vv"
            ],
            "console": "integratedTerminal",
            "justMyCode": true
        }
    ]
}
@@ -0,0 +1,7 @@
{
    "[python]" : {
        "editor.formatOnSave": true,
    },
    "python.formatting.provider": "black",

}

Dockerfile (35 changes)
@@ -1,35 +0,0 @@
FROM python:3-slim

ENV DRYRUN 'True'
ENV DEBUG 'True'
ENV DEBUG_LEVEL 'INFO'
ENV SLEEP_DURATION '3600'
ENV LOGFILE 'log.log'

ENV USER_MAPPING '{ "User Test": "User Test2" }'
ENV LIBRARY_MAPPING '{ "Shows Test": "TV Shows Test" }'

ENV PLEX_BASEURL 'http://localhost:32400'
ENV PLEX_TOKEN ''
ENV PLEX_USERNAME ''
ENV PLEX_PASSWORD ''
ENV PLEX_SERVERNAME ''

ENV JELLYFIN_BASEURL 'http://localhost:8096'
ENV JELLYFIN_TOKEN ''

ENV BLACKLIST_LIBRARY ''
ENV WHITELIST_LIBRARY ''
ENV BLACKLIST_LIBRARY_TYPE ''
ENV WHITELIST_LIBRARY_TYPE ''
ENV BLACKLIST_USERS ''
ENV WHITELIST_USERS ''

WORKDIR /app

COPY ./requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

CMD ["python", "-u", "main.py"]
@@ -0,0 +1,107 @@
FROM ghcr.io/astral-sh/uv:python3.13-alpine

ENV PUID=1000
ENV PGID=1000
ENV GOSU_VERSION=1.17

RUN apk add --no-cache tini dos2unix

# Install gosu
RUN set -eux; \
    \
    apk add --no-cache --virtual .gosu-deps \
        ca-certificates \
        dpkg \
        gnupg \
    ; \
    \
    dpkgArch="$(dpkg --print-architecture | awk -F- '{ print $NF }')"; \
    wget -O /usr/local/bin/gosu "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$dpkgArch"; \
    wget -O /usr/local/bin/gosu.asc "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$dpkgArch.asc"; \
    \
    # verify the signature
    export GNUPGHOME="$(mktemp -d)"; \
    gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys B42F6819007F00F88E364FD4036A9C25BF357DD4; \
    gpg --batch --verify /usr/local/bin/gosu.asc /usr/local/bin/gosu; \
    gpgconf --kill all; \
    rm -rf "$GNUPGHOME" /usr/local/bin/gosu.asc; \
    \
    # clean up fetch dependencies
    apk del --no-network .gosu-deps; \
    \
    chmod +x /usr/local/bin/gosu; \
    # verify that the binary works
    gosu --version; \
    gosu nobody true

WORKDIR /app

# Enable bytecode compilation
ENV UV_COMPILE_BYTECODE=1

ENV UV_LINK_MODE=copy

# Set the cache directory to /tmp instead of root
ENV UV_CACHE_DIR=/tmp/.cache/uv

# Install the project's dependencies using the lockfile and settings
RUN --mount=type=cache,target=/tmp/.cache/uv \
    --mount=type=bind,source=uv.lock,target=uv.lock \
    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
    uv sync --frozen --no-install-project --no-dev

# Then, add the rest of the project source code and install it
# Installing separately from its dependencies allows optimal layer caching
COPY . /app
RUN --mount=type=cache,target=/tmp/.cache/uv \
    uv sync --frozen --no-dev

# Place executables in the environment at the front of the path
ENV PATH="/app/.venv/bin:$PATH"

COPY . .

RUN chmod +x *.sh && \
    dos2unix *.sh

# Set default values to prevent issues
ENV DRYRUN="True"
ENV DEBUG_LEVEL="INFO"
ENV RUN_ONLY_ONCE="False"
ENV SLEEP_DURATION=3600
ENV LOG_FILE="log.log"
ENV MARK_FILE="mark.log"
ENV REQUEST_TIME=300
ENV GENERATE_GUIDS="True"
ENV GENERATE_LOCATIONS="True"
ENV MAX_THREADS=1
ENV USER_MAPPING=""
ENV LIBRARY_MAPPING=""
ENV BLACKLIST_LIBRARY=""
ENV WHITELIST_LIBRARY=""
ENV BLACKLIST_LIBRARY_TYPE=""
ENV WHITELIST_LIBRARY_TYPE=""
ENV BLACKLIST_USERS=""
ENV WHITELIST_USERS=""
ENV PLEX_BASEURL=""
ENV PLEX_TOKEN=""
ENV PLEX_USERNAME=""
ENV PLEX_PASSWORD=""
ENV PLEX_SERVERNAME=""
ENV SSL_BYPASS="False"
ENV JELLYFIN_BASEURL=""
ENV JELLYFIN_TOKEN=""
ENV EMBY_BASEURL=""
ENV EMBY_TOKEN=""
ENV SYNC_FROM_PLEX_TO_JELLYFIN="True"
ENV SYNC_FROM_PLEX_TO_PLEX="True"
ENV SYNC_FROM_PLEX_TO_EMBY="True"
ENV SYNC_FROM_JELLYFIN_TO_PLEX="True"
ENV SYNC_FROM_JELLYFIN_TO_JELLYFIN="True"
ENV SYNC_FROM_JELLYFIN_TO_EMBY="True"
ENV SYNC_FROM_EMBY_TO_PLEX="True"
ENV SYNC_FROM_EMBY_TO_JELLYFIN="True"
ENV SYNC_FROM_EMBY_TO_EMBY="True"

ENTRYPOINT ["tini", "--", "/app/entrypoint.sh"]
CMD ["python", "-u", "main.py"]
@@ -0,0 +1,81 @@
FROM ghcr.io/astral-sh/uv:bookworm-slim

ENV PUID=1000
ENV PGID=1000

RUN apt-get update && \
    apt-get install tini gosu dos2unix --yes --no-install-recommends && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Enable bytecode compilation
ENV UV_COMPILE_BYTECODE=1

ENV UV_LINK_MODE=copy

# Set the cache directory to /tmp instead of root
ENV UV_CACHE_DIR=/tmp/.cache/uv

ENV UV_PYTHON_INSTALL_DIR=/app/.bin

# Install the project's dependencies using the lockfile and settings
RUN --mount=type=cache,target=/tmp/.cache/uv \
    --mount=type=bind,source=uv.lock,target=uv.lock \
    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
    uv sync --frozen --no-install-project --no-dev

# Then, add the rest of the project source code and install it
# Installing separately from its dependencies allows optimal layer caching
COPY . /app
RUN --mount=type=cache,target=/tmp/.cache/uv \
    uv sync --frozen --no-dev

# Place executables in the environment at the front of the path
ENV PATH="/app/.venv/bin:$PATH"

RUN chmod +x *.sh && \
    dos2unix *.sh

# Set default values to prevent issues
ENV DRYRUN="True"
ENV DEBUG_LEVEL="INFO"
ENV RUN_ONLY_ONCE="False"
ENV SLEEP_DURATION=3600
ENV LOG_FILE="log.log"
ENV MARK_FILE="mark.log"
ENV REQUEST_TIME=300
ENV GENERATE_GUIDS="True"
ENV GENERATE_LOCATIONS="True"
ENV MAX_THREADS=1
ENV USER_MAPPING=""
ENV LIBRARY_MAPPING=""
ENV BLACKLIST_LIBRARY=""
ENV WHITELIST_LIBRARY=""
ENV BLACKLIST_LIBRARY_TYPE=""
ENV WHITELIST_LIBRARY_TYPE=""
ENV BLACKLIST_USERS=""
ENV WHITELIST_USERS=""
ENV PLEX_BASEURL=""
ENV PLEX_TOKEN=""
ENV PLEX_USERNAME=""
ENV PLEX_PASSWORD=""
ENV PLEX_SERVERNAME=""
ENV SSL_BYPASS="False"
ENV JELLYFIN_BASEURL=""
ENV JELLYFIN_TOKEN=""
ENV EMBY_BASEURL=""
ENV EMBY_TOKEN=""
ENV SYNC_FROM_PLEX_TO_JELLYFIN="True"
ENV SYNC_FROM_PLEX_TO_PLEX="True"
ENV SYNC_FROM_PLEX_TO_EMBY="True"
ENV SYNC_FROM_JELLYFIN_TO_PLEX="True"
ENV SYNC_FROM_JELLYFIN_TO_JELLYFIN="True"
ENV SYNC_FROM_JELLYFIN_TO_EMBY="True"
ENV SYNC_FROM_EMBY_TO_PLEX="True"
ENV SYNC_FROM_EMBY_TO_JELLYFIN="True"
ENV SYNC_FROM_EMBY_TO_EMBY="True"

ENTRYPOINT ["/bin/tini", "--", "/app/entrypoint.sh"]
CMD ["python", "-u", "main.py"]

README.md (111 changes)
@@ -1,72 +1,117 @@
# JellyPlex-Watched

[![Codacy Badge](https://app.codacy.com/project/badge/Grade/749d8e2bd23a4e8cbd9e0d4cb3ba4b45)](https://www.codacy.com/gh/luigi311/JellyPlex-Watched/dashboard?utm_source=github.com&utm_medium=referral&utm_content=luigi311/JellyPlex-Watched&utm_campaign=Badge_Grade)
[![Codacy Badge](https://app.codacy.com/project/badge/Grade/749d8e2bd23a4e8cbd9e0d4cb3ba4b45)](https://www.codacy.com/gh/luigi311/JellyPlex-Watched/dashboard?utm_source=github.com&utm_medium=referral&utm_content=luigi311/JellyPlex-Watched&utm_campaign=Badge_Grade)

Sync watched between jellyfin and plex
Sync watched between jellyfin, plex and emby locally

## Description

Keep in sync all your users watched history between jellyfin and plex servers locally. This uses the imdb ids and any other matching id to find the correct episode/movie between the two. This is not perfect but it works for most cases. You can use this for as many servers as you want by enterying multiple options in the .env plex/jellyfin section seperated by commas.
Keep in sync all your users watched history between jellyfin, plex and emby servers locally. This uses file names and provider ids to find the correct episode/movie between the two. This is not perfect but it works for most cases. You can use this for as many servers as you want by entering multiple options in the .env plex/jellyfin section separated by commas.

## Features

### Plex

- \[x] Match via filenames
- \[x] Match via provider ids
- \[x] Map usernames
- \[x] Use single login
- \[x] One way/multi way sync
- \[x] Sync watched
- \[x] Sync in progress
- \[ ] Sync view dates

### Jellyfin

- \[x] Match via filenames
- \[x] Match via provider ids
- \[x] Map usernames
- \[x] Use single login
- \[x] One way/multi way sync
- \[x] Sync watched
- \[x] Sync in progress
- \[x] Sync view dates


### Emby

- \[x] Match via filenames
- \[x] Match via provider ids
- \[x] Map usernames
- \[x] Use single login
- \[x] One way/multi way sync
- \[x] Sync watched
- \[x] Sync in progress
- \[x] Sync view dates


## Configuration


Full list of configuration options can be found in the [.env.sample](.env.sample)

## Installation

### Baremetal

- Setup virtualenv of your choice
- [Install uv](https://docs.astral.sh/uv/getting-started/installation/)

- Install dependencies
- Create a .env file similar to .env.sample; fill in baseurls and tokens, **remember to uncomment anything you wish to use** (e.g., user mapping, library mapping, black/whitelist, etc.). If you want to store your .env file anywhere else or under a different name you can use ENV_FILE variable to specify the location.

```bash
pip install -r requirements.txt
```
- Run

- Create a .env file similar to .env.sample, uncomment whitelist and blacklist if needed, fill in baseurls and tokens
```bash
uv run main.py
```

- Run

```bash
python main.py
```
```bash
ENV_FILE="Test.env" uv run main.py
```

### Docker

- Build docker image
- Build docker image

```bash
docker build -t jellyplex-watched .
```
```bash
docker build -t jellyplex-watched .
```

- or use pre-built image
- or use pre-built image

```bash
docker pull luigi311/jellyplex-watched:latest
```
```bash
docker pull luigi311/jellyplex-watched:latest
```

#### With variables

- Run
- Run

```bash
docker run --rm -it -e PLEX_TOKEN='SuperSecretToken' luigi311/jellyplex-watched:latest
```
```bash
docker run --rm -it -e PLEX_TOKEN='SuperSecretToken' luigi311/jellyplex-watched:latest
```

#### With .env

- Create a .env file similar to .env.sample and set the MNEMONIC variable to your seed phrase
- Create a .env file similar to .env.sample and set the variables to match your setup

- Run
- Run

```bash
docker run --rm -it -v "$(pwd)/.env:/app/.env" luigi311/jellyplex-watched:latest
```
```bash
docker run --rm -it -v "$(pwd)/.env:/app/.env" luigi311/jellyplex-watched:latest
```

## Troubleshooting/Issues

- Jellyfin

  - Attempt to decode JSON with unexpected mimetype, make sure you enable remote access or add your docker subnet to lan networks in jellyfin settings

- Configuration
  - Do not use quotes around variables in docker compose
  - If you are not running all 3 supported servers, that is, Plex, Jellyfin, and Emby simultaneously, make sure to comment out the server url and token of the server you aren't using.

## Contributing

I am open to recieving pull requests. If you are submitting a pull request, please make sure run it locally for a day or two to make sure it is working as expected and stable. Make all pull requests against the dev branch and nothing will be merged into the main without going through the lower branches.
I am open to receiving pull requests. If you are submitting a pull request, please make sure run it locally for a day or two to make sure it is working as expected and stable.

## License
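The new Description above says items are matched across servers by file names and provider IDs. As a toy illustration of the provider-ID half of that idea (invented names and data, not the project's actual implementation), two items can be considered the same when any provider/ID pair overlaps:

```python
# Toy sketch of provider-ID matching; the real matching in this repo also
# compares file locations and is more involved.
def same_item(guids_a: dict[str, str], guids_b: dict[str, str]) -> bool:
    return any(guids_b.get(provider) == guid for provider, guid in guids_a.items())

plex_movie = {"imdb": "tt0133093", "tmdb": "603"}
jellyfin_movie = {"tmdb": "603"}
assert same_item(plex_movie, jellyfin_movie)
```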
@@ -0,0 +1,11 @@
# Sync watched status between media servers locally

services:
  jellyplex-watched:
    image: luigi311/jellyplex-watched:latest
    container_name: jellyplex-watched
    restart: unless-stopped
    environment:
      - PUID=1000
      - PGID=1000
    env_file: "./.env"
@@ -0,0 +1,62 @@
#!/usr/bin/env sh

set -e

# Check if user is root
if [ "$(id -u)" = '0' ]; then
    echo "User is root, checking if we need to create a user and group based on environment variables"
    # Create group and user based on environment variables
    if [ ! "$(getent group "$PGID")" ]; then
        # If groupadd exists, use it
        if command -v groupadd > /dev/null; then
            groupadd -g "$PGID" jellyplex_watched
        elif command -v addgroup > /dev/null; then
            addgroup -g "$PGID" jellyplex_watched
        fi
    fi

    # If user id does not exist, create the user
    if [ ! "$(getent passwd "$PUID")" ]; then
        if command -v useradd > /dev/null; then
            useradd --no-create-home -u "$PUID" -g "$PGID" jellyplex_watched
        elif command -v adduser > /dev/null; then
            # Get the group name based on the PGID since adduser does not have a flag to specify the group id
            # and if the group id already exists the group name will be sommething unexpected
            GROUPNAME=$(getent group "$PGID" | cut -d: -f1)

            # Use alpine busybox adduser syntax
            adduser -D -H -u "$PUID" -G "$GROUPNAME" jellyplex_watched
        fi
    fi
else
    # If user is not root, set the PUID and PGID to the current user
    PUID=$(id -u)
    PGID=$(id -g)
fi

# Get directory of log and mark file to create base folder if it doesnt exist
LOG_DIR=$(dirname "$LOG_FILE")
# If LOG_DIR is set, create the directory
if [ -n "$LOG_DIR" ]; then
    mkdir -p "$LOG_DIR"
fi

MARK_DIR=$(dirname "$MARK_FILE")
if [ -n "$MARK_DIR" ]; then
    mkdir -p "$MARK_DIR"
fi

echo "Starting JellyPlex-Watched with UID: $PUID and GID: $PGID"

# If root run as the created user
if [ "$(id -u)" = '0' ]; then
    chown -R "$PUID:$PGID" /app/.venv
    chown -R "$PUID:$PGID" "$LOG_DIR"
    chown -R "$PUID:$PGID" "$MARK_DIR"

    # Run the application as the created user
    exec gosu "$PUID:$PGID" "$@"
else
    # Run the application as the current user
    exec "$@"
fi

main.py (21 changes)
@@ -1,10 +1,11 @@
import sys

if __name__ == '__main__':
    # Check python version 3.6 or higher
    if not (3, 6) <= tuple(map(int, sys.version_info[:2])):
        print("This script requires Python 3.6 or higher")
        sys.exit(1)

    from src.main import main
    main()
import sys

if __name__ == "__main__":
    # Check python version 3.12 or higher
    if not (3, 12) <= tuple(map(int, sys.version_info[:2])):
        print("This script requires Python 3.12 or higher")
        sys.exit(1)

    from src.main import main

    main()
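The version gate in the new main.py builds a tuple before comparing; an equivalent and arguably more direct form (a sketch, not part of the diff) compares sys.version_info directly:

```python
import sys

# Equivalent gate (illustrative): sys.version_info compares directly
# against a (major, minor) tuple, so the map/tuple conversion is not required.
if sys.version_info[:2] < (3, 12):
    print("This script requires Python 3.12 or higher")
    sys.exit(1)
```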
@@ -0,0 +1,24 @@
[project]
name = "jellyplex-watched"
version = "8.3.0"
description = "Sync watched between media servers locally"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
    "loguru>=0.7.3",
    "packaging==25.0",
    "plexapi==4.17.1",
    "pydantic==2.11.7",
    "python-dotenv==1.1.1",
    "requests==2.32.5",
]

[dependency-groups]
lint = [
    "ruff>=0.12.3",
]
dev = [
    "mypy>=1.16.1",
    "pytest>=8.4.1",
    "types-requests>=2.32.0.20250611",
]
@@ -1,3 +0,0 @@
plexapi
requests
python-dotenv
@ -0,0 +1,10 @@
|
|||
#! /usr/bin/env bash
|
||||
|
||||
d=/tmp/jellyplex.d
|
||||
mkdir -p $d
|
||||
docker run --rm -it -v "$d":/mnt $(
|
||||
if [ "${PWD##*/}" == JellyPlex-Watched ]; then
|
||||
echo "-v $PWD/src:/app/src"
|
||||
fi
|
||||
) -v $PWD/.env:/app/.env \
|
||||
luigi311/jellyplex-watched:latest
|
||||
|

src/black_white.py (new file)

@@ -0,0 +1,85 @@
from loguru import logger

from src.functions import search_mapping


def setup_black_white_lists(
    blacklist_library: list[str] | None,
    whitelist_library: list[str] | None,
    blacklist_library_type: list[str] | None,
    whitelist_library_type: list[str] | None,
    blacklist_users: list[str] | None,
    whitelist_users: list[str] | None,
    library_mapping: dict[str, str] | None = None,
    user_mapping: dict[str, str] | None = None,
) -> tuple[list[str], list[str], list[str], list[str], list[str], list[str]]:
    blacklist_library, blacklist_library_type, blacklist_users = setup_x_lists(
        blacklist_library,
        blacklist_library_type,
        blacklist_users,
        "Black",
        library_mapping,
        user_mapping,
    )

    whitelist_library, whitelist_library_type, whitelist_users = setup_x_lists(
        whitelist_library,
        whitelist_library_type,
        whitelist_users,
        "White",
        library_mapping,
        user_mapping,
    )

    return (
        blacklist_library,
        whitelist_library,
        blacklist_library_type,
        whitelist_library_type,
        blacklist_users,
        whitelist_users,
    )


def setup_x_lists(
    xlist_library: list[str] | None,
    xlist_library_type: list[str] | None,
    xlist_users: list[str] | None,
    xlist_type: str | None,
    library_mapping: dict[str, str] | None = None,
    user_mapping: dict[str, str] | None = None,
) -> tuple[list[str], list[str], list[str]]:
    out_library: list[str] = []
    if xlist_library:
        out_library = [x.strip() for x in xlist_library]
        if library_mapping:
            temp_library: list[str] = []
            for library in xlist_library:
                library_other = search_mapping(library_mapping, library)
                if library_other:
                    temp_library.append(library_other)

            out_library = out_library + temp_library
    logger.info(f"{xlist_type}list Library: {xlist_library}")

    out_library_type: list[str] = []
    if xlist_library_type:
        out_library_type = [x.lower().strip() for x in xlist_library_type]

    logger.info(f"{xlist_type}list Library Type: {out_library_type}")

    out_users: list[str] = []
    if xlist_users:
        out_users = [x.lower().strip() for x in xlist_users]
        if user_mapping:
            temp_users: list[str] = []
            for user in out_users:
                user_other = search_mapping(user_mapping, user)
                if user_other:
                    temp_users.append(user_other)

            out_users = out_users + temp_users

    logger.info(f"{xlist_type}list Users: {out_users}")

    return out_library, out_library_type, out_users
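
To illustrate how the mapping expansion behaves, a blacklisted library gets matched under both its local and mapped names (values hypothetical):

from src.black_white import setup_black_white_lists

# Suppose "Movies" on one server is named "Films" on the other.
lists = setup_black_white_lists(
    blacklist_library=["Movies"],
    whitelist_library=None,
    blacklist_library_type=None,
    whitelist_library_type=None,
    blacklist_users=["guest"],
    whitelist_users=None,
    library_mapping={"Movies": "Films"},
)
# lists[0] is now ["Movies", "Films"], so the library is skipped
# regardless of which server's title is encountered.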

src/connection.py (new file)

@@ -0,0 +1,127 @@
from typing import Literal
from loguru import logger

from src.functions import str_to_bool, get_env_value
from src.plex import Plex
from src.jellyfin import Jellyfin
from src.emby import Emby


def jellyfin_emby_server_connection(
    env,
    server_baseurl: str,
    server_token: str,
    server_type: Literal["jellyfin", "emby"],
) -> list[Jellyfin | Emby]:
    servers: list[Jellyfin | Emby] = []
    server: Jellyfin | Emby

    server_baseurls = server_baseurl.split(",")
    server_tokens = server_token.split(",")

    if len(server_baseurls) != len(server_tokens):
        raise Exception(
            f"{server_type.upper()}_BASEURL and {server_type.upper()}_TOKEN must have the same number of entries"
        )

    for i, base_url in enumerate(server_baseurls):
        base_url = base_url.strip()
        if base_url[-1] == "/":
            base_url = base_url[:-1]

        if server_type == "jellyfin":
            server = Jellyfin(
                env=env, base_url=base_url, token=server_tokens[i].strip()
            )
            servers.append(server)
        elif server_type == "emby":
            server = Emby(env=env, base_url=base_url, token=server_tokens[i].strip())
            servers.append(server)
        else:
            raise Exception("Unknown server type")

        logger.debug(f"{server_type} Server {i} info: {server.info()}")

    return servers


def generate_server_connections(env) -> list[Plex | Jellyfin | Emby]:
    servers: list[Plex | Jellyfin | Emby] = []

    plex_baseurl_str: str | None = get_env_value(env, "PLEX_BASEURL", None)
    plex_token_str: str | None = get_env_value(env, "PLEX_TOKEN", None)
    plex_username_str: str | None = get_env_value(env, "PLEX_USERNAME", None)
    plex_password_str: str | None = get_env_value(env, "PLEX_PASSWORD", None)
    plex_servername_str: str | None = get_env_value(env, "PLEX_SERVERNAME", None)
    ssl_bypass = str_to_bool(get_env_value(env, "SSL_BYPASS", "False"))

    print(f"if plex_baseurl_str={plex_baseurl_str} and plex_token_str={plex_token_str}")
    if plex_baseurl_str and plex_token_str:
        plex_baseurl = plex_baseurl_str.split(",")
        plex_token = plex_token_str.split(",")

        if len(plex_baseurl) != len(plex_token):
            raise Exception(
                "PLEX_BASEURL and PLEX_TOKEN must have the same number of entries"
            )

        for i, url in enumerate(plex_baseurl):
            print(f"Plex({url.strip()}, {plex_token[i].strip()})")
            server = Plex(
                env,
                base_url=url.strip(),
                token=plex_token[i].strip(),
                user_name=None,
                password=None,
                server_name=None,
                ssl_bypass=ssl_bypass,
            )

            logger.debug(f"Plex Server {i} info: {server.info()}")

            servers.append(server)

    if plex_username_str and plex_password_str and plex_servername_str:
        plex_username = plex_username_str.split(",")
        plex_password = plex_password_str.split(",")
        plex_servername = plex_servername_str.split(",")

        if len(plex_username) != len(plex_password) or len(plex_username) != len(
            plex_servername
        ):
            raise Exception(
                "PLEX_USERNAME, PLEX_PASSWORD and PLEX_SERVERNAME must have the same number of entries"
            )

        for i, username in enumerate(plex_username):
            server = Plex(
                env,
                base_url=None,
                token=None,
                user_name=username.strip(),
                password=plex_password[i].strip(),
                server_name=plex_servername[i].strip(),
                ssl_bypass=ssl_bypass,
            )

            logger.debug(f"Plex Server {i} info: {server.info()}")
            servers.append(server)

    jellyfin_baseurl = get_env_value(env, "JELLYFIN_BASEURL", None)
    jellyfin_token = get_env_value(env, "JELLYFIN_TOKEN", None)
    if jellyfin_baseurl and jellyfin_token:
        servers.extend(
            jellyfin_emby_server_connection(
                env, jellyfin_baseurl, jellyfin_token, "jellyfin"
            )
        )

    emby_baseurl = get_env_value(env, "EMBY_BASEURL", None)
    emby_token = get_env_value(env, "EMBY_TOKEN", None)
    if emby_baseurl and emby_token:
        servers.extend(
            jellyfin_emby_server_connection(env, emby_baseurl, emby_token, "emby")
        )

    return servers
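
All of the *_BASEURL/*_TOKEN variables are comma-separated and paired by position. A hypothetical configuration (values invented for illustration; note that constructing the server objects opens real connections):

from src.connection import generate_server_connections

env = {
    "PLEX_BASEURL": "http://plex.local:32400",
    "PLEX_TOKEN": "plex-token",
    "JELLYFIN_BASEURL": "http://jf1.local:8096, http://jf2.local:8096",
    "JELLYFIN_TOKEN": "token-one, token-two",
}

servers = generate_server_connections(env)  # [Plex, Jellyfin, Jellyfin]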

src/emby.py (new file)

@@ -0,0 +1,32 @@
from src.jellyfin_emby import JellyfinEmby
from packaging.version import parse, Version
from loguru import logger


class Emby(JellyfinEmby):
    def __init__(self, env, base_url: str, token: str) -> None:
        authorization = (
            "Emby , "
            'Client="JellyPlex-Watched", '
            'Device="script", '
            'DeviceId="script", '
            'Version="6.0.2"'
        )
        headers = {
            "Accept": "application/json",
            "X-Emby-Token": token,
            "X-Emby-Authorization": authorization,
        }

        super().__init__(
            env, server_type="Emby", base_url=base_url, token=token, headers=headers
        )

    def is_partial_update_supported(self, server_version: Version) -> bool:
        if not server_version >= parse("4.4"):
            logger.info(
                f"{self.server_type}: Server version {server_version} does not support updating playback position.",
            )
            return False

        return True
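
The version gate relies on packaging's parse, which compares release segments numerically rather than as strings. For example (versions hypothetical):

from packaging.version import parse

print(parse("4.3.9") >= parse("4.4"))   # False -> partial updates disabled
print(parse("4.10.0") >= parse("4.4"))  # True  -> "4.10" correctly sorts above "4.4"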

src/functions.py (288 changes)

@@ -1,150 +1,138 @@
Removed:

import os
from concurrent.futures import ThreadPoolExecutor
from dotenv import load_dotenv

load_dotenv(override=True)

logfile = os.getenv("LOGFILE", "log.log")


def logger(message: str, log_type=0):
    debug = str_to_bool(os.getenv("DEBUG", "True"))
    debug_level = os.getenv("DEBUG_LEVEL", "info").lower()

    output = str(message)
    if log_type == 0:
        pass
    elif log_type == 1 and (debug or debug_level == "info"):
        output = f"[INFO]: {output}"
    elif log_type == 2:
        output = f"[ERROR]: {output}"
    elif log_type == 3 and (debug and debug_level == "debug"):
        output = f"[DEBUG]: {output}"
    else:
        output = None

    if output is not None:
        print(output)
        file = open(logfile, "a", encoding="utf-8")
        file.write(output + "\n")


# Reimplementation of distutils.util.strtobool due to it being deprecated
# Source: https://github.com/PostHog/posthog/blob/01e184c29d2c10c43166f1d40a334abbc3f99d8a/posthog/utils.py#L668
def str_to_bool(value: any) -> bool:
    if not value:
        return False
    return str(value).lower() in ("y", "yes", "t", "true", "on", "1")


# Get mapped value
def search_mapping(dictionary: dict, key_value: str):
    if key_value in dictionary.keys():
        return dictionary[key_value]
    elif key_value.lower() in dictionary.keys():
        return dictionary[key_value.lower()]
    elif key_value in dictionary.values():
        return list(dictionary.keys())[list(dictionary.values()).index(key_value)]
    elif key_value.lower() in dictionary.values():
        return list(dictionary.keys())[list(dictionary.values()).index(key_value.lower())]
    else:
        return None


def check_skip_logic(library_title, library_type, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping):
    skip_reason = None

    if library_type.lower() in blacklist_library_type:
        skip_reason = "is blacklist_library_type"

    if library_title.lower() in [x.lower() for x in blacklist_library]:
        skip_reason = "is blacklist_library"

    library_other = None
    if library_mapping:
        library_other = search_mapping(library_mapping, library_title)
    if library_other:
        if library_other.lower() in [x.lower() for x in blacklist_library]:
            skip_reason = "is blacklist_library"

    if len(whitelist_library_type) > 0:
        if library_type.lower() not in whitelist_library_type:
            skip_reason = "is not whitelist_library_type"

    # if whitelist is not empty and library is not in whitelist
    if len(whitelist_library) > 0:
        if library_title.lower() not in [x.lower() for x in whitelist_library]:
            skip_reason = "is not whitelist_library"

        if library_other:
            if library_other.lower() not in [x.lower() for x in whitelist_library]:
                skip_reason = "is not whitelist_library"

    return skip_reason


def generate_library_guids_dict(user_list: dict, generate_output: int):
    # if generate_output is 0 then only generate shows, if 1 then only generate episodes, if 2 then generate movies, if 3 then generate shows and episodes
    show_output_dict = {}
    episode_output_dict = {}
    movies_output_dict = {}

    if generate_output in (0, 3):
        show_output_keys = user_list.keys()
        show_output_keys = [dict(x) for x in list(show_output_keys)]
        for show_key in show_output_keys:
            for provider_key, provider_value in show_key.items():
                # Skip title
                if provider_key.lower() == "title":
                    continue
                if provider_key.lower() not in show_output_dict:
                    show_output_dict[provider_key.lower()] = []
                if provider_key.lower() == "locations":
                    for show_location in provider_value:
                        show_output_dict[provider_key.lower()].append(show_location)
                else:
                    show_output_dict[provider_key.lower()].append(provider_value.lower())

    if generate_output in (1, 3):
        for show in user_list:
            for season in user_list[show]:
                for episode in user_list[show][season]:
                    for episode_key, episode_value in episode.items():
                        if episode_key.lower() not in episode_output_dict:
                            episode_output_dict[episode_key.lower()] = []
                        if episode_key == "locations":
                            for episode_location in episode_value:
                                episode_output_dict[episode_key.lower()].append(episode_location)
                        else:
                            episode_output_dict[episode_key.lower()].append(episode_value.lower())

    if generate_output == 2:
        for movie in user_list:
            for movie_key, movie_value in movie.items():
                if movie_key.lower() not in movies_output_dict:
                    movies_output_dict[movie_key.lower()] = []
                if movie_key == "locations":
                    for movie_location in movie_value:
                        movies_output_dict[movie_key.lower()].append(movie_location)
                else:
                    movies_output_dict[movie_key.lower()].append(movie_value.lower())

    return show_output_dict, episode_output_dict, movies_output_dict


def future_thread_executor(args: list, workers: int = -1):
    futures_list = []
    results = []

    if workers == -1:
        workers = min(32, os.cpu_count() * 1.25)

    with ThreadPoolExecutor(max_workers=workers) as executor:
        for arg in args:
            # * arg unpacks the list into actual arguments
            futures_list.append(executor.submit(*arg))

        for future in futures_list:
            try:
                result = future.result()
                results.append(result)
            except Exception as e:
                raise Exception(e)

    return results

Added:

import os
from concurrent.futures import Future, ThreadPoolExecutor
from typing import Any, Callable
from dotenv import load_dotenv
import re
from pathlib import PureWindowsPath, PurePosixPath

load_dotenv(override=True)


def log_marked(
    server_type: str,
    server_name: str,
    username: str,
    library: str,
    movie_show: str,
    episode: str | None = None,
    duration: float | None = None,
    mark_file: str = "mark.log",
) -> None:
    output = f"{server_type}/{server_name}/{username}/{library}/{movie_show}"

    if episode:
        output += f"/{episode}"

    if duration:
        output += f"/{duration}"

    with open(mark_file, "a", encoding="utf-8") as file:
        file.write(output + "\n")


def get_env_value(env, key: str, default: Any = None):
    if env and key in env:
        return env[key]
    elif os.getenv(key):
        return os.getenv(key)
    else:
        return default


# Reimplementation of distutils.util.strtobool due to it being deprecated
# Source: https://github.com/PostHog/posthog/blob/01e184c29d2c10c43166f1d40a334abbc3f99d8a/posthog/utils.py#L668
def str_to_bool(value: str | None) -> bool:
    if not value:
        return False
    return str(value).lower() in ("y", "yes", "t", "true", "on", "1")


# Get mapped value
def search_mapping(dictionary: dict[str, str], key_value: str) -> str | None:
    if key_value in dictionary.keys():
        return dictionary[key_value]
    elif key_value.lower() in dictionary.keys():
        return dictionary[key_value.lower()]
    elif key_value in dictionary.values():
        return list(dictionary.keys())[list(dictionary.values()).index(key_value)]
    elif key_value.lower() in dictionary.values():
        return list(dictionary.keys())[
            list(dictionary.values()).index(key_value.lower())
        ]
    else:
        return None


# Return list of objects that exist in both lists including mappings
def match_list(
    list1: list[str], list2: list[str], list_mapping: dict[str, str] | None = None
) -> list[str]:
    output: list[str] = []
    for element in list1:
        if element in list2:
            output.append(element)
        elif list_mapping:
            element_other = search_mapping(list_mapping, element)
            if element_other in list2:
                output.append(element)

    return output


def future_thread_executor(
    args: list[tuple[Callable[..., Any], ...]],
    threads: int | None = None,
    override_threads: bool = False,
    max_threads: int | None = None,
) -> list[Any]:
    results: list[Any] = []

    # Determine the number of workers, defaulting to 1 if os.cpu_count() returns None
    cpu_threads: int = os.cpu_count() or 1  # Default to 1 if os.cpu_count() is None
    workers: int = min(max_threads, cpu_threads * 2) if max_threads else cpu_threads * 2

    # Adjust workers based on threads parameter and override_threads flag
    if threads is not None:
        workers = min(threads, workers)
    if override_threads:
        workers = threads if threads is not None else workers

    # If only one worker, run in main thread to avoid overhead
    if workers == 1:
        for arg in args:
            results.append(arg[0](*arg[1:]))
        return results

    with ThreadPoolExecutor(max_workers=workers) as executor:
        futures_list: list[Future[Any]] = []

        for arg in args:
            # * arg unpacks the list into actual arguments
            futures_list.append(executor.submit(*arg))

        for out in futures_list:
            try:
                result = out.result()
                results.append(result)
            except Exception as e:
                raise Exception(e)

    return results


def parse_string_to_list(string: str | None) -> list[str]:
    output: list[str] = []
    if string and len(string) > 0:
        output = string.split(",")

    return output


_WINDOWS_DRIVE = re.compile(r"^[A-Za-z]:")  # e.g. C: D:


def filename_from_any_path(p: str) -> str:
    # Windows-y if UNC (\\server\share), drive letter, or has backslashes
    if p.startswith("\\\\") or _WINDOWS_DRIVE.match(p) or ("\\" in p and "/" not in p):
        return PureWindowsPath(p).name
    return PurePosixPath(p).name
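
Two of the new helpers are easiest to understand by example (paths and titles hypothetical):

from src.functions import filename_from_any_path, match_list

# Filename extraction works for POSIX, Windows drive, and UNC paths alike.
print(filename_from_any_path("/media/Movies/Heat (1995)/Heat.mkv"))   # Heat.mkv
print(filename_from_any_path("C:\\Media\\Movies\\Heat.mkv"))          # Heat.mkv
print(filename_from_any_path("\\\\nas\\media\\Heat.mkv"))             # Heat.mkv

# match_list keeps entries found in both lists, consulting the mapping.
print(match_list(["Movies"], ["Films"], {"Movies": "Films"}))         # ['Movies']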

src/jellyfin.py (341 changes)

@@ -1,319 +1,32 @@
Removed:

import requests
from src.functions import logger, search_mapping, str_to_bool, check_skip_logic, generate_library_guids_dict, future_thread_executor


class Jellyfin():
    def __init__(self, baseurl, token):
        self.baseurl = baseurl
        self.token = token
        self.session = requests.Session()

        if not self.baseurl:
            raise Exception("Jellyfin baseurl not set")

        if not self.token:
            raise Exception("Jellyfin token not set")

        self.users = self.get_users()

    def query(self, query, query_type):
        try:
            response = None

            headers = {
                "Accept": "application/json",
                "X-Emby-Token": self.token
            }
            if query_type == "get":
                response = self.session.get(self.baseurl + query, headers=headers)
            elif query_type == "post":
                authorization = (
                    'MediaBrowser , '
                    'Client="other", '
                    'Device="script", '
                    'DeviceId="script", '
                    'Version="0.0.0"'
                )
                headers["X-Emby-Authorization"] = authorization
                response = self.session.post(self.baseurl + query, headers=headers)

            return response.json()

        except Exception as e:
            logger(f"Jellyfin: Query failed {e}", 2)
            raise Exception(e)

    def get_users(self):
        try:
            users = {}

            query = "/Users"
            response = self.query(query, "get")

            # If response is not empty
            if response:
                for user in response:
                    users[user["Name"]] = user["Id"]

            return users
        except Exception as e:
            logger(f"Jellyfin: Get users failed {e}", 2)
            raise Exception(e)

    def get_user_watched(self, user_name, user_id, library_type, library_id, library_title):
        try:
            user_name = user_name.lower()
            user_watched = {}
            user_watched[user_name] = {}

            logger(f"Jellyfin: Generating watched for {user_name} in library {library_title}", 0)
            # Movies
            if library_type == "Movie":
                user_watched[user_name][library_title] = []
                watched = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=true&ParentId={library_id}&Filters=IsPlayed&Fields=ItemCounts,ProviderIds,MediaSources", "get")
                for movie in watched["Items"]:
                    if movie["UserData"]["Played"] == True:
                        movie_guids = {}
                        movie_guids["title"] = movie["Name"]
                        if movie["ProviderIds"]:
                            # Lowercase movie["ProviderIds"] keys
                            movie_guids = {k.lower(): v for k, v in movie["ProviderIds"].items()}
                        if movie["MediaSources"]:
                            movie_guids["locations"] = tuple([x["Path"].split("/")[-1] for x in movie["MediaSources"]])
                        user_watched[user_name][library_title].append(movie_guids)

            # TV Shows
            if library_type == "Episode":
                user_watched[user_name][library_title] = {}
                watched = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=true&ParentId={library_id}&Fields=ItemCounts,ProviderIds,Path", "get")
                watched_shows = [x for x in watched["Items"] if x["Type"] == "Series"]

                for show in watched_shows:
                    show_guids = {k.lower(): v for k, v in show["ProviderIds"].items()}
                    show_guids["title"] = show["Name"]
                    show_guids["locations"] = tuple([show["Path"].split("/")[-1]])
                    show_guids = frozenset(show_guids.items())
                    seasons = self.query(f"/Shows/{show['Id']}/Seasons?userId={user_id}&Fields=ItemCounts,ProviderIds", "get")
                    if len(seasons["Items"]) > 0:
                        for season in seasons["Items"]:
                            episodes = self.query(f"/Shows/{show['Id']}/Episodes?seasonId={season['Id']}&userId={user_id}&Fields=ItemCounts,ProviderIds,MediaSources", "get")
                            if len(episodes["Items"]) > 0:
                                for episode in episodes["Items"]:
                                    if episode["UserData"]["Played"] == True:
                                        if episode["ProviderIds"] or episode["MediaSources"]:
                                            if show_guids not in user_watched[user_name][library_title]:
                                                user_watched[user_name][library_title][show_guids] = {}
                                            if season["Name"] not in user_watched[user_name][library_title][show_guids]:
                                                user_watched[user_name][library_title][show_guids][season["Name"]] = []

                                            # Lowercase episode["ProviderIds"] keys
                                            episode_guids = {}
                                            if episode["ProviderIds"]:
                                                episode_guids = {k.lower(): v for k, v in episode["ProviderIds"].items()}
                                            if episode["MediaSources"]:
                                                episode_guids["locations"] = tuple([x["Path"].split("/")[-1] for x in episode["MediaSources"]])
                                            user_watched[user_name][library_title][show_guids][season["Name"]].append(episode_guids)

            return user_watched
        except Exception as e:
            logger(f"Jellyfin: Failed to get watched for {user_name} in library {library_title}, Error: {e}", 2)
            raise Exception(e)

    def get_watched(self, users, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping=None):
        try:
            users_watched = {}
            args = []

            for user_name, user_id in users.items():
                # Get all libraries
                user_name = user_name.lower()

                libraries = self.query(f"/Users/{user_id}/Views", "get")["Items"]

                for library in libraries:
                    library_title = library["Name"]
                    library_id = library["Id"]
                    watched = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=true&ParentId={library_id}&Filters=IsPlayed&limit=1", "get")

                    if len(watched["Items"]) == 0:
                        logger(f"Jellyfin: No watched items found in library {library_title}", 1)
                        continue
                    else:
                        library_type = watched["Items"][0]["Type"]

                    skip_reason = check_skip_logic(library_title, library_type, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping)

                    if skip_reason:
                        logger(f"Jellyfin: Skipping library {library_title} {skip_reason}", 1)
                        continue

                    args.append([self.get_user_watched, user_name, user_id, library_type, library_id, library_title])

            for user_watched in future_thread_executor(args):
                for user, user_watched_temp in user_watched.items():
                    if user not in users_watched:
                        users_watched[user] = {}
                    users_watched[user].update(user_watched_temp)

            return users_watched
        except Exception as e:
            logger(f"Jellyfin: Failed to get watched, Error: {e}", 2)
            raise Exception(e)

    def update_user_watched(self, user, user_id, library, library_id, videos, dryrun):
        try:
            logger(f"Jellyfin: Updating watched for {user} in library {library}", 1)
            library_search = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=true&ParentId={library_id}&limit=1", "get")
            library_type = library_search["Items"][0]["Type"]

            # Movies
            if library_type == "Movie":
                _, _, videos_movies_ids = generate_library_guids_dict(videos, 2)

                jellyfin_search = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=false&ParentId={library_id}&isPlayed=false&Fields=ItemCounts,ProviderIds,MediaSources", "get")
                for jellyfin_video in jellyfin_search["Items"]:
                    movie_found = False

                    if "MediaSources" in jellyfin_video:
                        for movie_location in jellyfin_video["MediaSources"]:
                            if movie_location["Path"].split("/")[-1] in videos_movies_ids["locations"]:
                                movie_found = True
                                break

                    if not movie_found:
                        for movie_provider_source, movie_provider_id in jellyfin_video["ProviderIds"].items():
                            if movie_provider_source.lower() in videos_movies_ids:
                                if movie_provider_id.lower() in videos_movies_ids[movie_provider_source.lower()]:
                                    movie_found = True
                                    break

                    if movie_found:
                        jellyfin_video_id = jellyfin_video["Id"]
                        msg = f"{jellyfin_video['Name']} as watched for {user} in {library} for Jellyfin"
                        if not dryrun:
                            logger(f"Marking {msg}", 0)
                            self.query(f"/Users/{user_id}/PlayedItems/{jellyfin_video_id}", "post")
                        else:
                            logger(f"Dryrun {msg}", 0)

            # TV Shows
            if library_type == "Episode":
                videos_shows_ids, videos_episode_ids, _ = generate_library_guids_dict(videos, 3)

                jellyfin_search = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=false&ParentId={library_id}&isPlayed=false&Fields=ItemCounts,ProviderIds,Path", "get")
                jellyfin_shows = [x for x in jellyfin_search["Items"]]

                for jellyfin_show in jellyfin_shows:
                    show_found = False

                    if jellyfin_show["Name"] == "The 13 Ghosts of Scooby-Doo":
                        print(jellyfin_show)

                    if "Path" in jellyfin_show:
                        if jellyfin_show["Path"].split("/")[-1] in videos_shows_ids["locations"]:
                            show_found = True

                    if not show_found:
                        for show_provider_source, show_provider_id in jellyfin_show["ProviderIds"].items():
                            if show_provider_source.lower() in videos_shows_ids:
                                if show_provider_id.lower() in videos_shows_ids[show_provider_source.lower()]:
                                    show_found = True
                                    break

                    if show_found:
                        jellyfin_show_id = jellyfin_show["Id"]
                        jellyfin_episodes = self.query(f"/Shows/{jellyfin_show_id}/Episodes?userId={user_id}&Fields=ItemCounts,ProviderIds,MediaSources", "get")

                        for jellyfin_episode in jellyfin_episodes["Items"]:
                            episode_found = False

                            if "MediaSources" in jellyfin_episode:
                                for episode_location in jellyfin_episode["MediaSources"]:
                                    if episode_location["Path"].split("/")[-1] in videos_episode_ids["locations"]:
                                        episode_found = True
                                        break

                            if not episode_found:
                                for episode_provider_source, episode_provider_id in jellyfin_episode["ProviderIds"].items():
                                    if episode_provider_source.lower() in videos_episode_ids:
                                        if episode_provider_id.lower() in videos_episode_ids[episode_provider_source.lower()]:
                                            episode_found = True
                                            break

                            if episode_found:
                                jellyfin_episode_id = jellyfin_episode["Id"]
                                msg = f"{jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode['IndexNumber']} {jellyfin_episode['Name']} as watched for {user} in {library} for Jellyfin"
                                if not dryrun:
                                    logger(f"Marked {msg}", 0)
                                    self.query(f"/Users/{user_id}/PlayedItems/{jellyfin_episode_id}", "post")
                                else:
                                    logger(f"Dryrun {msg}", 0)

        except Exception as e:
            logger(f"Jellyfin: Error updating watched for {user} in library {library}", 2)
            raise Exception(e)

    def update_watched(self, watched_list, user_mapping=None, library_mapping=None, dryrun=False):
        try:
            args = []
            for user, libraries in watched_list.items():
                user_other = None
                if user_mapping:
                    if user in user_mapping.keys():
                        user_other = user_mapping[user]
                    elif user in user_mapping.values():
                        user_other = search_mapping(user_mapping, user)

                user_id = None
                for key in self.users.keys():
                    if user.lower() == key.lower():
                        user_id = self.users[key]
                        break
                    elif user_other and user_other.lower() == key.lower():
                        user_id = self.users[key]
                        break

                if not user_id:
                    logger(f"{user} {user_other} not found in Jellyfin", 2)
                    continue

                jellyfin_libraries = self.query(f"/Users/{user_id}/Views", "get")["Items"]

                for library, videos in libraries.items():
                    library_other = None
                    if library_mapping:
                        if library in library_mapping.keys():
                            library_other = library_mapping[library]
                        elif library in library_mapping.values():
                            library_other = search_mapping(library_mapping, library)

                    if library.lower() not in [x["Name"].lower() for x in jellyfin_libraries]:
                        if library_other:
                            if library_other.lower() in [x["Name"].lower() for x in jellyfin_libraries]:
                                logger(f"Jellyfin: Library {library} not found, but {library_other} found, using {library_other}", 1)
                                library = library_other
                            else:
                                logger(f"Jellyfin: Library {library} or {library_other} not found in library list", 2)
                                continue
                        else:
                            logger(f"Jellyfin: Library {library} not found in library list", 2)
                            continue

                    library_id = None
                    for jellyfin_library in jellyfin_libraries:
                        if jellyfin_library["Name"] == library:
                            library_id = jellyfin_library["Id"]
                            continue

                    if library_id:
                        args.append([self.update_user_watched, user, user_id, library, library_id, videos, dryrun])

            future_thread_executor(args)
        except Exception as e:
            logger(f"Jellyfin: Error updating watched", 2)
            raise Exception(e)

Added:

from src.jellyfin_emby import JellyfinEmby
from packaging.version import parse, Version
from loguru import logger


class Jellyfin(JellyfinEmby):
    def __init__(self, env, base_url: str, token: str) -> None:
        authorization = (
            "MediaBrowser , "
            'Client="JellyPlex-Watched", '
            'Device="script", '
            'DeviceId="script", '
            'Version="6.0.2", '
            f'Token="{token}"'
        )
        headers = {
            "Accept": "application/json",
            "Authorization": authorization,
        }

        super().__init__(
            env, server_type="Jellyfin", base_url=base_url, token=token, headers=headers
        )

    def is_partial_update_supported(self, server_version: Version) -> bool:
        if not server_version >= parse("10.9.0"):
            logger.info(
                f"{self.server_type}: Server version {server_version} does not support updating playback position.",
            )
            return False

        return True
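
With the shared logic moved into JellyfinEmby, constructing a server is all that is needed; the base class fetches users, server name, and version up front. A usage sketch (connection details hypothetical; the constructor performs real HTTP requests):

from src.jellyfin import Jellyfin

server = Jellyfin(env={}, base_url="http://jellyfin.local:8096", token="api-token")
print(server.info())           # e.g. "Jellyfin MyServer: 10.9.6"
print(server.update_partial)   # True on 10.9.0 or newer, False otherwise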

src/jellyfin_emby.py (new file)

@@ -0,0 +1,909 @@
# Functions for Jellyfin and Emby
|
||||
|
||||
from datetime import datetime
|
||||
import requests
|
||||
import traceback
|
||||
from math import floor
|
||||
from typing import Any, Literal
|
||||
from packaging.version import parse, Version
|
||||
from loguru import logger
|
||||
|
||||
from src.functions import (
|
||||
filename_from_any_path,
|
||||
search_mapping,
|
||||
log_marked,
|
||||
str_to_bool,
|
||||
get_env_value,
|
||||
)
|
||||
from src.watched import (
|
||||
LibraryData,
|
||||
MediaIdentifiers,
|
||||
MediaItem,
|
||||
WatchedStatus,
|
||||
Series,
|
||||
UserData,
|
||||
check_same_identifiers,
|
||||
)
|
||||
|
||||
|
||||
def extract_identifiers_from_item(
|
||||
server_type: str,
|
||||
item: dict[str, Any],
|
||||
generate_guids: bool,
|
||||
generate_locations: bool,
|
||||
) -> MediaIdentifiers:
|
||||
title = item.get("Name")
|
||||
id = None
|
||||
if not title:
|
||||
id = item.get("Id")
|
||||
logger.debug(f"{server_type}: Name not found for {id}")
|
||||
|
||||
guids = {}
|
||||
if generate_guids:
|
||||
guids = {k.lower(): v for k, v in item.get("ProviderIds", {}).items()}
|
||||
|
||||
locations: tuple[str, ...] = tuple()
|
||||
full_path: str = ""
|
||||
if generate_locations:
|
||||
if item.get("Path"):
|
||||
full_path = item["Path"]
|
||||
locations = tuple([filename_from_any_path(full_path)])
|
||||
elif item.get("MediaSources"):
|
||||
full_paths = [x["Path"] for x in item["MediaSources"] if x.get("Path")]
|
||||
locations = tuple([filename_from_any_path(x) for x in full_paths])
|
||||
full_path = " ".join(full_paths)
|
||||
|
||||
if generate_guids:
|
||||
if not guids:
|
||||
logger.debug(
|
||||
f"{server_type}: {title if title else id} has no guids{f', locations: {full_path}' if full_path else ''}",
|
||||
)
|
||||
|
||||
if generate_locations:
|
||||
if not locations:
|
||||
logger.debug(
|
||||
f"{server_type}: {title if title else id} has no locations{f', guids: {guids}' if guids else ''}",
|
||||
)
|
||||
|
||||
return MediaIdentifiers(
|
||||
title=title,
|
||||
locations=locations,
|
||||
imdb_id=guids.get("imdb"),
|
||||
tvdb_id=guids.get("tvdb"),
|
||||
tmdb_id=guids.get("tmdb"),
|
||||
)
|
||||
|
||||
|
||||
def get_mediaitem(
|
||||
server_type: str,
|
||||
item: dict[str, Any],
|
||||
generate_guids: bool,
|
||||
generate_locations: bool,
|
||||
) -> MediaItem:
|
||||
user_data = item.get("UserData", {})
|
||||
last_played_date = user_data.get("LastPlayedDate")
|
||||
|
||||
viewed_date = datetime.today()
|
||||
if last_played_date:
|
||||
viewed_date = datetime.fromisoformat(last_played_date.replace("Z", "+00:00"))
|
||||
|
||||
return MediaItem(
|
||||
identifiers=extract_identifiers_from_item(
|
||||
server_type, item, generate_guids, generate_locations
|
||||
),
|
||||
status=WatchedStatus(
|
||||
completed=user_data.get("Played"),
|
||||
time=floor(user_data.get("PlaybackPositionTicks", 0) / 10000),
|
||||
viewed_date=viewed_date,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class JellyfinEmby:
|
||||
def __init__(
|
||||
self,
|
||||
env,
|
||||
server_type: Literal["Jellyfin", "Emby"],
|
||||
base_url: str,
|
||||
token: str,
|
||||
headers: dict[str, str],
|
||||
) -> None:
|
||||
self.env = env
|
||||
|
||||
if server_type not in ["Jellyfin", "Emby"]:
|
||||
raise Exception(f"Server type {server_type} not supported")
|
||||
self.server_type: str = server_type
|
||||
self.base_url: str = base_url
|
||||
self.token: str = token
|
||||
self.headers: dict[str, str] = headers
|
||||
self.timeout: int = int(get_env_value(self.env, "REQUEST_TIMEOUT", 300))
|
||||
|
||||
if not self.base_url:
|
||||
raise Exception(f"{self.server_type} base_url not set")
|
||||
|
||||
if not self.token:
|
||||
raise Exception(f"{self.server_type} token not set")
|
||||
|
||||
self.session = requests.Session()
|
||||
self.users: dict[str, str] = self.get_users()
|
||||
self.server_name: str = self.info(name_only=True)
|
||||
self.server_version: Version = self.info(version_only=True)
|
||||
self.update_partial: bool = self.is_partial_update_supported(
|
||||
self.server_version
|
||||
)
|
||||
self.generate_guids: bool = str_to_bool(
|
||||
get_env_value(self.env, "GENERATE_GUIDS", "True")
|
||||
)
|
||||
self.generate_locations: bool = str_to_bool(
|
||||
get_env_value(self.env, "GENERATE_LOCATIONS", "True")
|
||||
)
|
||||
|
||||
def query(
|
||||
self,
|
||||
query: str,
|
||||
query_type: Literal["get", "post"],
|
||||
identifiers: dict[str, str] | None = None,
|
||||
json: dict[str, float] | None = None,
|
||||
) -> list[dict[str, Any]] | dict[str, Any] | None:
|
||||
try:
|
||||
results = None
|
||||
|
||||
if query_type == "get":
|
||||
response = self.session.get(
|
||||
self.base_url + query, headers=self.headers, timeout=self.timeout
|
||||
)
|
||||
if response.status_code not in [200, 204]:
|
||||
raise Exception(
|
||||
f"Query failed with status {response.status_code} {response.reason}"
|
||||
)
|
||||
if response.status_code == 204:
|
||||
results = None
|
||||
else:
|
||||
results = response.json()
|
||||
|
||||
elif query_type == "post":
|
||||
response = self.session.post(
|
||||
self.base_url + query,
|
||||
headers=self.headers,
|
||||
json=json,
|
||||
timeout=self.timeout,
|
||||
)
|
||||
if response.status_code not in [200, 204]:
|
||||
raise Exception(
|
||||
f"Query failed with status {response.status_code} {response.reason}"
|
||||
)
|
||||
if response.status_code == 204:
|
||||
results = None
|
||||
else:
|
||||
results = response.json()
|
||||
|
||||
if results:
|
||||
if not isinstance(results, list) and not isinstance(results, dict):
|
||||
raise Exception("Query result is not of type list or dict")
|
||||
|
||||
# append identifiers to results
|
||||
if identifiers and isinstance(results, dict):
|
||||
results["Identifiers"] = identifiers
|
||||
|
||||
return results
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"{self.server_type}: Query {query_type} {query}\nResults {results}\n{e}",
|
||||
)
|
||||
raise Exception(e)
|
||||
|
||||
def info(
|
||||
self, name_only: bool = False, version_only: bool = False
|
||||
) -> str | Version | None:
|
||||
try:
|
||||
query_string = "/System/Info/Public"
|
||||
|
||||
response = self.query(query_string, "get")
|
||||
|
||||
if response and isinstance(response, dict):
|
||||
if name_only:
|
||||
return response.get("ServerName")
|
||||
elif version_only:
|
||||
return parse(response.get("Version", ""))
|
||||
|
||||
return f"{self.server_type} {response.get('ServerName')}: {response.get('Version')}"
|
||||
else:
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"{self.server_type}: Get server name failed {e}")
|
||||
raise Exception(e)
|
||||
|
||||
def get_users(self) -> dict[str, str]:
|
||||
try:
|
||||
users: dict[str, str] = {}
|
||||
|
||||
query_string = "/Users"
|
||||
response = self.query(query_string, "get")
|
||||
|
||||
if response and isinstance(response, list):
|
||||
for user in response:
|
||||
users[user["Name"]] = user["Id"]
|
||||
|
||||
return users
|
||||
except Exception as e:
|
||||
logger.error(f"{self.server_type}: Get users failed {e}")
|
||||
raise Exception(e)
|
||||
|
||||
def get_libraries(self) -> dict[str, str]:
|
||||
try:
|
||||
libraries: dict[str, str] = {}
|
||||
|
||||
# Theres no way to get all libraries so individually get list of libraries from all users
|
||||
users = self.get_users()
|
||||
|
||||
for user_name, user_id in users.items():
|
||||
user_libraries = self.query(f"/Users/{user_id}/Views", "get")
|
||||
|
||||
if not user_libraries or not isinstance(user_libraries, dict):
|
||||
logger.error(
|
||||
f"{self.server_type}: Failed to get libraries for {user_name}"
|
||||
)
|
||||
return libraries
|
||||
|
||||
logger.debug(
|
||||
f"{self.server_type}: All Libraries for {user_name} {[library.get('Name') for library in user_libraries.get('Items', [])]}"
|
||||
)
|
||||
|
||||
for library in user_libraries.get("Items", []):
|
||||
library_title = library.get("Name")
|
||||
library_type = library.get("CollectionType")
|
||||
|
||||
# If collection type is not set, fallback based on media files
|
||||
if not library_type:
|
||||
library_id = library.get("Id")
|
||||
# Get first 100 items in library
|
||||
library_items = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&Recursive=True&excludeItemTypes=Folder&limit=100",
|
||||
"get",
|
||||
)
|
||||
|
||||
if not library_items or not isinstance(library_items, dict):
|
||||
logger.debug(
|
||||
f"{self.server_type}: Failed to get library items for {user_name} {library_title}"
|
||||
)
|
||||
continue
|
||||
|
||||
all_types = set(
|
||||
[x.get("Type") for x in library_items.get("Items", [])]
|
||||
)
|
||||
types = set([x for x in all_types if x in ["Movie", "Episode"]])
|
||||
|
||||
if not len(types) == 1:
|
||||
logger.debug(
|
||||
f"{self.server_type}: Skipping Library {library_title} didn't find just a single type, found {all_types}",
|
||||
)
|
||||
continue
|
||||
|
||||
library_type = types.pop()
|
||||
|
||||
library_type = (
|
||||
"movies" if library_type == "Movie" else "tvshows"
|
||||
)
|
||||
|
||||
if library_type not in ["movies", "tvshows"]:
|
||||
logger.debug(
|
||||
f"{self.server_type}: Skipping Library {library_title} found type {library_type}",
|
||||
)
|
||||
continue
|
||||
|
||||
libraries[library_title] = library_type
|
||||
|
||||
return libraries
|
||||
except Exception as e:
|
||||
logger.error(f"{self.server_type}: Get libraries failed {e}")
|
||||
raise Exception(e)
|
||||
|
||||
def get_user_library_watched(
|
||||
self,
|
||||
user_name: str,
|
||||
user_id: str,
|
||||
library_type: Literal["movies", "tvshows"],
|
||||
library_id: str,
|
||||
library_title: str,
|
||||
) -> LibraryData:
|
||||
user_name = user_name.lower()
|
||||
try:
|
||||
logger.info(
|
||||
f"{self.server_type}: Generating watched for {user_name} in library {library_title}",
|
||||
)
|
||||
watched = LibraryData(title=library_title)
|
||||
|
||||
# Movies
|
||||
if library_type == "movies":
|
||||
movie_items = []
|
||||
watched_items = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&Filters=IsPlayed&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources,UserDataLastPlayedDate",
|
||||
"get",
|
||||
)
|
||||
|
||||
if watched_items and isinstance(watched_items, dict):
|
||||
movie_items += watched_items.get("Items", [])
|
||||
|
||||
in_progress_items = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&Filters=IsResumable&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources,UserDataLastPlayedDate",
|
||||
"get",
|
||||
)
|
||||
|
||||
if in_progress_items and isinstance(in_progress_items, dict):
|
||||
movie_items += in_progress_items.get("Items", [])
|
||||
|
||||
for movie in movie_items:
|
||||
# Skip if theres no user data which means the movie has not been watched
|
||||
if not movie.get("UserData"):
|
||||
continue
|
||||
|
||||
# Skip if theres no media tied to the movie
|
||||
if not movie.get("MediaSources"):
|
||||
continue
|
||||
|
||||
# Skip if not watched or watched less than a minute
|
||||
if (
|
||||
movie["UserData"].get("Played")
|
||||
or movie["UserData"].get("PlaybackPositionTicks", 0) > 600000000
|
||||
):
|
||||
watched.movies.append(
|
||||
get_mediaitem(
|
||||
self.server_type,
|
||||
movie,
|
||||
self.generate_guids,
|
||||
self.generate_locations,
|
||||
)
|
||||
)
|
||||
|
||||
# TV Shows
|
||||
if library_type == "tvshows":
|
||||
# Retrieve a list of watched TV shows
|
||||
all_shows = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&isPlaceHolder=false&IncludeItemTypes=Series&Recursive=True&Fields=ProviderIds,Path,RecursiveItemCount",
|
||||
"get",
|
||||
)
|
||||
|
||||
if not all_shows or not isinstance(all_shows, dict):
|
||||
logger.debug(
|
||||
f"{self.server_type}: Failed to get shows for {user_name} in {library_title}"
|
||||
)
|
||||
return watched
|
||||
|
||||
# Filter the list of shows to only include those that have been partially or fully watched
|
||||
watched_shows_filtered = []
|
||||
for show in all_shows.get("Items", []):
|
||||
if not show.get("UserData"):
|
||||
continue
|
||||
|
||||
played_percentage = show["UserData"].get("PlayedPercentage")
|
||||
if played_percentage is None:
|
||||
# Emby no longer shows PlayedPercentage
|
||||
total_episodes = show.get("RecursiveItemCount")
|
||||
unplayed_episodes = show["UserData"].get("UnplayedItemCount")
|
||||
|
||||
if total_episodes is None:
|
||||
# Failed to get total count of episodes
|
||||
continue
|
||||
|
||||
if (
|
||||
unplayed_episodes is not None
|
||||
and unplayed_episodes < total_episodes
|
||||
):
|
||||
watched_shows_filtered.append(show)
|
||||
else:
|
||||
if played_percentage > 0:
|
||||
watched_shows_filtered.append(show)
|
||||
|
||||
# Retrieve the watched/partially watched list of episodes of each watched show
|
||||
for show in watched_shows_filtered:
|
||||
show_name = show.get("Name")
|
||||
show_guids = {
|
||||
k.lower(): v for k, v in show.get("ProviderIds", {}).items()
|
||||
}
|
||||
show_locations = (
|
||||
tuple([filename_from_any_path(show["Path"])])
|
||||
if show.get("Path")
|
||||
else tuple()
|
||||
)
|
||||
|
||||
show_episodes = self.query(
|
||||
f"/Shows/{show.get('Id')}/Episodes"
|
||||
+ f"?userId={user_id}&isPlaceHolder=false&Fields=ProviderIds,MediaSources,UserDataLastPlayedDate",
|
||||
"get",
|
||||
)
|
||||
|
||||
if not show_episodes or not isinstance(show_episodes, dict):
|
||||
logger.debug(
|
||||
f"{self.server_type}: Failed to get episodes for {user_name} {library_title} {show_name}"
|
||||
)
|
||||
continue
|
||||
|
||||
# Iterate through the episodes
|
||||
# Create a list to store the episodes
|
||||
episode_mediaitem = []
|
||||
for episode in show_episodes.get("Items", []):
|
||||
if not episode.get("UserData"):
|
||||
continue
|
||||
|
||||
if not episode.get("MediaSources"):
|
||||
continue
|
||||
|
||||
# If watched or watched more than a minute
|
||||
if (
|
||||
episode["UserData"].get("Played")
|
||||
or episode["UserData"].get("PlaybackPositionTicks", 0)
|
||||
> 600000000
|
||||
):
|
||||
episode_mediaitem.append(
|
||||
get_mediaitem(
|
||||
self.server_type,
|
||||
episode,
|
||||
self.generate_guids,
|
||||
self.generate_locations,
|
||||
)
|
||||
)
|
||||
|
||||
if episode_mediaitem:
|
||||
watched.series.append(
|
||||
Series(
|
||||
identifiers=MediaIdentifiers(
|
||||
title=show.get("Name"),
|
||||
locations=show_locations,
|
||||
imdb_id=show_guids.get("imdb"),
|
||||
tvdb_id=show_guids.get("tvdb"),
|
||||
tmdb_id=show_guids.get("tmdb"),
|
||||
),
|
||||
episodes=episode_mediaitem,
|
||||
)
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"{self.server_type}: Finished getting watched for {user_name} in library {library_title}",
|
||||
)
|
||||
|
||||
return watched
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"{self.server_type}: Failed to get watched for {user_name} in library {library_title}, Error: {e}",
|
||||
)
|
||||
|
||||
logger.error(traceback.format_exc())
|
||||
return LibraryData(title=library_title)
|
||||
|
||||
def get_watched(
|
||||
self,
|
||||
users: dict[str, str],
|
||||
sync_libraries: list[str],
|
||||
users_watched: dict[str, UserData] = None,
|
||||
) -> dict[str, UserData]:
|
||||
try:
|
||||
if not users_watched:
|
||||
users_watched: dict[str, UserData] = {}
|
||||
|
||||
for user_name, user_id in users.items():
|
||||
if user_name.lower() not in users_watched:
|
||||
users_watched[user_name.lower()] = UserData()
|
||||
|
||||
all_libraries = self.query(f"/Users/{user_id}/Views", "get")
|
||||
if not all_libraries or not isinstance(all_libraries, dict):
|
||||
logger.debug(
|
||||
f"{self.server_type}: Failed to get all libraries for {user_name}"
|
||||
)
|
||||
continue
|
||||
|
||||
for library in all_libraries.get("Items", []):
|
||||
library_id = library.get("Id")
|
||||
library_title = library.get("Name")
|
||||
library_type = library.get("CollectionType")
|
||||
|
||||
if not library_id or not library_title or not library_type:
|
||||
logger.debug(
|
||||
f"{self.server_type}: Failed to get library data for {user_name} {library_title}"
|
||||
)
|
||||
continue
|
||||
|
||||
if library_title not in sync_libraries:
|
||||
continue
|
||||
|
||||
if library_title in users_watched:
|
||||
logger.info(
|
||||
f"{self.server_type}: {user_name} {library_title} watched history has already been gathered, skipping"
|
||||
)
|
||||
continue
|
||||
|
||||
# Get watched for user
|
||||
library_data = self.get_user_library_watched(
|
||||
user_name,
|
||||
user_id,
|
||||
library_type,
|
||||
library_id,
|
||||
library_title,
|
||||
)
|
||||
|
||||
if user_name.lower() not in users_watched:
|
||||
users_watched[user_name.lower()] = UserData()
|
||||
|
||||
users_watched[user_name.lower()].libraries[library_title] = (
|
||||
library_data
|
||||
)
|
||||
|
||||
return users_watched
|
||||
except Exception as e:
|
||||
logger.error(f"{self.server_type}: Failed to get watched, Error: {e}")
|
||||
return {}
|
||||
|
||||
def update_user_watched(
|
||||
self,
|
||||
user_name: str,
|
||||
user_id: str,
|
||||
library_data: LibraryData,
|
||||
library_name: str,
|
||||
library_id: str,
|
||||
dryrun: bool,
|
||||
) -> None:
|
||||
try:
|
||||
# If there are no movies or shows to update, exit early.
|
||||
if not library_data.series and not library_data.movies:
|
||||
return
|
||||
|
||||
logger.info(
|
||||
f"{self.server_type}: Updating watched for {user_name} in library {library_name}",
|
||||
)
|
||||
|
||||
# Update movies.
|
||||
if library_data.movies:
|
||||
jellyfin_search = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=True&ParentId={library_id}"
|
||||
+ "&isPlayed=false&Fields=ItemCounts,ProviderIds,MediaSources&IncludeItemTypes=Movie",
|
||||
"get",
|
||||
)
|
||||
|
||||
if not jellyfin_search or not isinstance(jellyfin_search, dict):
|
||||
logger.debug(
|
||||
f"{self.server_type}: Failed to get movies for {user_name} {library_name}"
|
||||
)
|
||||
return
|
||||
|
||||
for jellyfin_video in jellyfin_search.get("Items", []):
|
||||
jelly_identifiers = extract_identifiers_from_item(
|
||||
self.server_type,
|
||||
jellyfin_video,
|
||||
self.generate_guids,
|
||||
self.generate_locations,
|
||||
)
|
||||
# Check each stored movie for a match.
|
||||
for stored_movie in library_data.movies:
|
||||
if check_same_identifiers(
|
||||
jelly_identifiers, stored_movie.identifiers
|
||||
):
|
||||
jellyfin_video_id = jellyfin_video.get("Id")
|
||||
|
||||
viewed_date: str = (
|
||||
stored_movie.status.viewed_date.isoformat(
|
||||
timespec="milliseconds"
|
||||
).replace("+00:00", "Z")
|
||||
)
|
||||
|
||||
if stored_movie.status.completed:
|
||||
msg = f"{self.server_type}: {jellyfin_video.get('Name')} as watched for {user_name} in {library_name}"
|
||||
if not dryrun:
|
||||
user_data_payload: dict[
|
||||
str, float | bool | datetime
|
||||
] = {
|
||||
"PlayCount": 1,
|
||||
"Played": True,
|
||||
"PlaybackPositionTicks": 0,
|
||||
"LastPlayedDate": viewed_date,
|
||||
}
|
||||
self.query(
|
||||
f"/Users/{user_id}/Items/{jellyfin_video_id}/UserData",
|
||||
"post",
|
||||
json=user_data_payload,
|
||||
)
|
||||
|
||||
logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
|
||||
log_marked(
|
||||
self.server_type,
|
||||
self.server_name,
|
||||
user_name,
|
||||
library_name,
|
||||
jellyfin_video.get("Name"),
|
||||
mark_file=get_env_value(
|
||||
self.env, "MARK_FILE", "mark.log"
|
||||
),
|
||||
)
|
||||
elif self.update_partial:
|
||||
msg = f"{self.server_type}: {jellyfin_video.get('Name')} as partially watched for {floor(stored_movie.status.time / 60_000)} minutes for {user_name} in {library_name}"
|
||||
|
||||
if not dryrun:
|
||||
user_data_payload: dict[
|
||||
str, float | bool | datetime
|
||||
] = {
|
||||
"PlayCount": 0,
|
||||
"Played": False,
|
||||
"PlaybackPositionTicks": stored_movie.status.time
|
||||
* 10_000,
|
||||
"LastPlayedDate": viewed_date,
|
||||
}
|
||||
self.query(
|
||||
f"/Users/{user_id}/Items/{jellyfin_video_id}/UserData",
|
||||
"post",
|
||||
json=user_data_payload,
|
||||
)
|
||||
|
||||
logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
|
||||
log_marked(
|
||||
self.server_type,
|
||||
self.server_name,
|
||||
user_name,
|
||||
library_name,
|
||||
jellyfin_video.get("Name"),
|
||||
duration=floor(stored_movie.status.time / 60_000),
|
||||
mark_file=get_env_value(
|
||||
self.env, "MARK_FILE", "mark.log"
|
||||
),
|
||||
)
|
||||
else:
|
||||
logger.trace(
|
||||
f"{self.server_type}: Skipping movie {jellyfin_video.get('Name')} as it is not in mark list for {user_name}",
|
||||
)
|
||||
|
||||
# Update TV Shows (series/episodes).
|
||||
if library_data.series:
|
||||
jellyfin_search = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=True&ParentId={library_id}"
|
||||
+ "&Fields=ItemCounts,ProviderIds,Path&IncludeItemTypes=Series",
|
||||
"get",
|
||||
)
|
||||
if not jellyfin_search or not isinstance(jellyfin_search, dict):
|
||||
logger.debug(
|
||||
f"{self.server_type}: Failed to get shows for {user_name} {library_name}"
|
||||
)
|
||||
return
|
||||
|
||||
jellyfin_shows = [x for x in jellyfin_search.get("Items", [])]
|
||||
|
||||
for jellyfin_show in jellyfin_shows:
|
||||
jellyfin_show_identifiers = extract_identifiers_from_item(
|
||||
self.server_type,
|
||||
jellyfin_show,
|
||||
self.generate_guids,
|
||||
self.generate_locations,
|
||||
)
|
||||
# Try to find a matching series in your stored library.
|
||||
for stored_series in library_data.series:
|
||||
if check_same_identifiers(
|
||||
jellyfin_show_identifiers, stored_series.identifiers
|
||||
):
|
||||
logger.trace(
|
||||
f"Found matching show for '{jellyfin_show.get('Name')}'",
|
||||
)
|
||||
# Now update episodes.
|
||||
# Get the list of Plex episodes for this show.
|
                            jellyfin_show_id = jellyfin_show.get("Id")
                            jellyfin_episodes = self.query(
                                f"/Shows/{jellyfin_show_id}/Episodes"
                                + f"?userId={user_id}&Fields=ItemCounts,ProviderIds,MediaSources",
                                "get",
                            )

                            if not jellyfin_episodes or not isinstance(
                                jellyfin_episodes, dict
                            ):
                                logger.debug(
                                    f"{self.server_type}: Failed to get episodes for {user_name} {library_name} {jellyfin_show.get('Name')}"
                                )
                                return

                            for jellyfin_episode in jellyfin_episodes.get("Items", []):
                                jellyfin_episode_identifiers = (
                                    extract_identifiers_from_item(
                                        self.server_type,
                                        jellyfin_episode,
                                        self.generate_guids,
                                        self.generate_locations,
                                    )
                                )
                                for stored_ep in stored_series.episodes:
                                    if check_same_identifiers(
                                        jellyfin_episode_identifiers,
                                        stored_ep.identifiers,
                                    ):
                                        jellyfin_episode_id = jellyfin_episode.get("Id")

                                        viewed_date: str = (
                                            stored_ep.status.viewed_date.isoformat(
                                                timespec="milliseconds"
                                            ).replace("+00:00", "Z")
                                        )

                                        if stored_ep.status.completed:
                                            msg = (
                                                f"{self.server_type}: {jellyfin_episode.get('SeriesName')} {jellyfin_episode.get('SeasonName')} Episode {jellyfin_episode.get('IndexNumber')} {jellyfin_episode.get('Name')}"
                                                + f" as watched for {user_name} in {library_name}"
                                            )
                                            if not dryrun:
                                                user_data_payload: dict[
                                                    str, float | bool | datetime
                                                ] = {
                                                    "PlayCount": 1,
                                                    "Played": True,
                                                    "PlaybackPositionTicks": 0,
                                                    "LastPlayedDate": viewed_date,
                                                }
                                                self.query(
                                                    f"/Users/{user_id}/Items/{jellyfin_episode_id}/UserData",
                                                    "post",
                                                    json=user_data_payload,
                                                )

                                            logger.success(
                                                f"{'[DRYRUN] ' if dryrun else ''}{msg}"
                                            )
                                            log_marked(
                                                self.server_type,
                                                self.server_name,
                                                user_name,
                                                library_name,
                                                jellyfin_episode.get("SeriesName"),
                                                jellyfin_episode.get("Name"),
                                                mark_file=get_env_value(
                                                    self.env, "MARK_FILE", "mark.log"
                                                ),
                                            )
                                        elif self.update_partial:
                                            msg = (
                                                f"{self.server_type}: {jellyfin_episode.get('SeriesName')} {jellyfin_episode.get('SeasonName')} Episode {jellyfin_episode.get('IndexNumber')} {jellyfin_episode.get('Name')}"
                                                + f" as partially watched for {floor(stored_ep.status.time / 60_000)} minutes for {user_name} in {library_name}"
                                            )

                                            if not dryrun:
                                                user_data_payload: dict[
                                                    str, float | bool | datetime
                                                ] = {
                                                    "PlayCount": 0,
                                                    "Played": False,
                                                    "PlaybackPositionTicks": stored_ep.status.time
                                                    * 10_000,
                                                    "LastPlayedDate": viewed_date,
                                                }
                                                self.query(
                                                    f"/Users/{user_id}/Items/{jellyfin_episode_id}/UserData",
                                                    "post",
                                                    json=user_data_payload,
                                                )

                                            logger.success(
                                                f"{'[DRYRUN] ' if dryrun else ''}{msg}"
                                            )
                                            log_marked(
                                                self.server_type,
                                                self.server_name,
                                                user_name,
                                                library_name,
                                                jellyfin_episode.get("SeriesName"),
                                                jellyfin_episode.get("Name"),
                                                duration=floor(
                                                    stored_ep.status.time / 60_000
                                                ),
                                                mark_file=get_env_value(
                                                    self.env, "MARK_FILE", "mark.log"
                                                ),
                                            )
                                        else:
                                            logger.trace(
                                                f"{self.server_type}: Skipping episode {jellyfin_episode.get('Name')} as it is not in mark list for {user_name}",
                                            )
                        else:
                            logger.trace(
                                f"{self.server_type}: Skipping show {jellyfin_show.get('Name')} as it is not in mark list for {user_name}",
                            )

        except Exception as e:
            logger.error(
                f"{self.server_type}: Error updating watched for {user_name} in library {library_name}, {e}",
            )

    def update_watched(
        self,
        watched_list: dict[str, UserData],
        user_mapping: dict[str, str] | None = None,
        library_mapping: dict[str, str] | None = None,
        dryrun: bool = False,
    ) -> None:
        for user, user_data in watched_list.items():
            user_other = None
            user_name = None
            if user_mapping:
                if user in user_mapping.keys():
                    user_other = user_mapping[user]
                elif user in user_mapping.values():
                    user_other = search_mapping(user_mapping, user)

            user_id = None
            for key in self.users:
                if user.lower() == key.lower():
                    user_id = self.users[key]
                    user_name = key
                    break
                elif user_other and user_other.lower() == key.lower():
                    user_id = self.users[key]
                    user_name = key
                    break

            if not user_id or not user_name:
                logger.info(f"{user} {user_other} not found in Jellyfin")
                continue

            jellyfin_libraries = self.query(
                f"/Users/{user_id}/Views",
                "get",
            )

            if not jellyfin_libraries or not isinstance(jellyfin_libraries, dict):
                logger.debug(
                    f"{self.server_type}: Failed to get libraries for {user_name}"
                )
                continue

            jellyfin_libraries = [x for x in jellyfin_libraries.get("Items", [])]

            for library_name in user_data.libraries:
                library_data = user_data.libraries[library_name]
                library_other = None
                if library_mapping:
                    if library_name in library_mapping.keys():
                        library_other = library_mapping[library_name]
                    elif library_name in library_mapping.values():
                        library_other = search_mapping(library_mapping, library_name)

                if library_name.lower() not in [
                    x["Name"].lower() for x in jellyfin_libraries
                ]:
                    if library_other:
                        if library_other.lower() in [
                            x["Name"].lower() for x in jellyfin_libraries
                        ]:
                            logger.info(
                                f"{self.server_type}: Library {library_name} not found, but {library_other} found, using {library_other}",
                            )
                            library_name = library_other
                        else:
                            logger.info(
                                f"{self.server_type}: Library {library_name} or {library_other} not found in library list",
                            )
                            continue
                    else:
                        logger.info(
                            f"{self.server_type}: Library {library_name} not found in library list",
                        )
                        continue

                library_id = None
                for jellyfin_library in jellyfin_libraries:
                    if jellyfin_library["Name"].lower() == library_name.lower():
                        library_id = jellyfin_library["Id"]
                        break

                if library_id:
                    try:
                        self.update_user_watched(
                            user_name,
                            user_id,
                            library_data,
                            library_name,
                            library_id,
                            dryrun,
                        )
                    except Exception as e:
                        logger.error(
                            f"{self.server_type}: Error updating watched for {user_name} in library {library_name}, {e}",
                        )
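Both the movie branch and the episode branches above converge on the same Jellyfin call: a POST to /Users/{user_id}/Items/{item_id}/UserData whose payload either marks the item fully played or records a resume position. Jellyfin stores playback positions as 100-nanosecond ticks, which is why the stored millisecond offset is multiplied by 10_000. A minimal standalone sketch of that request follows; the base URL, token, and IDs are placeholders, and X-Emby-Token is one of the auth headers Jellyfin accepts:

import requests

BASE_URL = "http://localhost:8096"  # placeholder Jellyfin address
API_KEY = "your-api-key"            # placeholder token
USER_ID = "user-guid"               # placeholder user id
ITEM_ID = "item-guid"               # placeholder item id


def mark_partially_watched(position_ms: int, viewed_date: str) -> None:
    """Record a resume position, mirroring the payload built above."""
    payload = {
        "PlayCount": 0,
        "Played": False,
        # Jellyfin expresses positions in 100ns ticks: 1 ms == 10_000 ticks.
        "PlaybackPositionTicks": position_ms * 10_000,
        "LastPlayedDate": viewed_date,  # ISO-8601, e.g. "2024-01-01T12:00:00.000Z"
    }
    response = requests.post(
        f"{BASE_URL}/Users/{USER_ID}/Items/{ITEM_ID}/UserData",
        headers={"X-Emby-Token": API_KEY},
        json=payload,
        timeout=30,
    )
    response.raise_for_status()


mark_partially_watched(25 * 60_000, "2024-01-01T12:00:00.000Z")  # 25 minutes in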

src/library.py
@@ -0,0 +1,207 @@
from loguru import logger

from src.functions import (
    match_list,
    search_mapping,
)

from src.emby import Emby
from src.jellyfin import Jellyfin
from src.plex import Plex


def check_skip_logic(
    library_title: str,
    library_type: str,
    blacklist_library: list[str],
    whitelist_library: list[str],
    blacklist_library_type: list[str],
    whitelist_library_type: list[str],
    library_mapping: dict[str, str] | None = None,
) -> str | None:
    skip_reason = None
    library_other = None
    if library_mapping:
        library_other = search_mapping(library_mapping, library_title)

    skip_reason_black = check_blacklist_logic(
        library_title,
        library_type,
        blacklist_library,
        blacklist_library_type,
        library_other,
    )
    skip_reason_white = check_whitelist_logic(
        library_title,
        library_type,
        whitelist_library,
        whitelist_library_type,
        library_other,
    )

    # Combine skip reasons
    if skip_reason_black:
        skip_reason = skip_reason_black

    if skip_reason_white:
        if skip_reason:
            skip_reason = skip_reason + " and " + skip_reason_white
        else:
            skip_reason = skip_reason_white

    return skip_reason


def check_blacklist_logic(
    library_title: str,
    library_type: str,
    blacklist_library: list[str],
    blacklist_library_type: list[str],
    library_other: str | None = None,
) -> str | None:
    skip_reason = None
    if isinstance(library_type, (list, tuple, set)):
        for library_type_item in library_type:
            if library_type_item.lower() in blacklist_library_type:
                skip_reason = f"{library_type_item} is in blacklist_library_type"
    else:
        if library_type.lower() in blacklist_library_type:
            skip_reason = f"{library_type} is in blacklist_library_type"

    if library_title.lower() in [x.lower() for x in blacklist_library]:
        if skip_reason:
            skip_reason = (
                skip_reason + " and " + f"{library_title} is in blacklist_library"
            )
        else:
            skip_reason = f"{library_title} is in blacklist_library"

    if library_other:
        if library_other.lower() in [x.lower() for x in blacklist_library]:
            if skip_reason:
                skip_reason = (
                    skip_reason + " and " + f"{library_other} is in blacklist_library"
                )
            else:
                skip_reason = f"{library_other} is in blacklist_library"

    return skip_reason


def check_whitelist_logic(
    library_title: str,
    library_type: str,
    whitelist_library: list[str],
    whitelist_library_type: list[str],
    library_other: str | None = None,
) -> str | None:
    skip_reason = None
    if len(whitelist_library_type) > 0:
        if isinstance(library_type, (list, tuple, set)):
            for library_type_item in library_type:
                if library_type_item.lower() not in whitelist_library_type:
                    skip_reason = (
                        f"{library_type_item} is not in whitelist_library_type"
                    )
        else:
            if library_type.lower() not in whitelist_library_type:
                skip_reason = f"{library_type} is not in whitelist_library_type"

    # if whitelist is not empty and library is not in whitelist
    if len(whitelist_library) > 0:
        if library_other:
            if library_title.lower() not in [
                x.lower() for x in whitelist_library
            ] and library_other.lower() not in [x.lower() for x in whitelist_library]:
                if skip_reason:
                    skip_reason = (
                        skip_reason
                        + " and "
                        + f"{library_title} is not in whitelist_library"
                    )
                else:
                    skip_reason = f"{library_title} is not in whitelist_library"
        else:
            if library_title.lower() not in [x.lower() for x in whitelist_library]:
                if skip_reason:
                    skip_reason = (
                        skip_reason
                        + " and "
                        + f"{library_title} is not in whitelist_library"
                    )
                else:
                    skip_reason = f"{library_title} is not in whitelist_library"

    return skip_reason


def filter_libaries(
    server_libraries: dict[str, str],
    blacklist_library: list[str],
    blacklist_library_type: list[str],
    whitelist_library: list[str],
    whitelist_library_type: list[str],
    library_mapping: dict[str, str] | None = None,
) -> list[str]:
    filtered_libaries: list[str] = []
    for library in server_libraries:
        skip_reason = check_skip_logic(
            library,
            server_libraries[library],
            blacklist_library,
            whitelist_library,
            blacklist_library_type,
            whitelist_library_type,
            library_mapping,
        )

        if skip_reason:
            logger.info(f"Skipping library {library}: {skip_reason}")
            continue

        filtered_libaries.append(library)

    return filtered_libaries


def setup_libraries(
    server_1: Plex | Jellyfin | Emby,
    server_2: Plex | Jellyfin | Emby,
    blacklist_library: list[str],
    blacklist_library_type: list[str],
    whitelist_library: list[str],
    whitelist_library_type: list[str],
    library_mapping: dict[str, str] | None = None,
) -> tuple[list[str], list[str]]:
    server_1_libraries = server_1.get_libraries()
    server_2_libraries = server_2.get_libraries()

    logger.debug(f"{server_1.server_type}: Libraries and types {server_1_libraries}")
    logger.debug(f"{server_2.server_type}: Libraries and types {server_2_libraries}")

    # Filter out all blacklist/whitelist libraries
    filtered_server_1_libraries = filter_libaries(
        server_1_libraries,
        blacklist_library,
        blacklist_library_type,
        whitelist_library,
        whitelist_library_type,
        library_mapping,
    )
    filtered_server_2_libraries = filter_libaries(
        server_2_libraries,
        blacklist_library,
        blacklist_library_type,
        whitelist_library,
        whitelist_library_type,
        library_mapping,
    )

    output_server_1_libaries = match_list(
        filtered_server_1_libraries, filtered_server_2_libraries, library_mapping
    )
    output_server_2_libaries = match_list(
        filtered_server_2_libraries, filtered_server_1_libraries, library_mapping
    )

    return output_server_1_libaries, output_server_2_libaries
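check_skip_logic above combines the blacklist and whitelist checks, so a library can accumulate two independent skip reasons joined with " and ". A few illustrative calls against the definitions above, with expected return values in comments:

from src.library import check_skip_logic

# Type blacklisted:
check_skip_logic("Anime", "show", [], [], ["show"], [])
# -> "show is in blacklist_library_type"

# A whitelist exists and the title is not on it:
check_skip_logic("Home Videos", "movie", [], ["Movies"], [], [])
# -> "Home Videos is not in whitelist_library"

# Both reasons at once, concatenated with " and ":
check_skip_logic("Home Videos", "show", ["Home Videos"], ["Movies"], [], [])
# -> "Home Videos is in blacklist_library and Home Videos is not in whitelist_library"

# No lists configured: None, so the library is synced.
check_skip_logic("Movies", "movie", [], [], [], [])
# -> None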

src/main.py (764 changes)
@@ -1,441 +1,323 @@
import copy, os, traceback, json
from dotenv import load_dotenv
from time import sleep

from src.functions import logger, str_to_bool, search_mapping, generate_library_guids_dict, future_thread_executor
from src.plex import Plex
from src.jellyfin import Jellyfin

load_dotenv(override=True)

def cleanup_watched(watched_list_1, watched_list_2, user_mapping=None, library_mapping=None):
    modified_watched_list_1 = copy.deepcopy(watched_list_1)

    # remove entries from plex_watched that are in jellyfin_watched
    for user_1 in watched_list_1:
        user_other = None
        if user_mapping:
            user_other = search_mapping(user_mapping, user_1)
        if user_1 in modified_watched_list_1:
            if user_1 in watched_list_2:
                user_2 = user_1
            elif user_other in watched_list_2:
                user_2 = user_other
            else:
                logger(f"User {user_1} and {user_other} not found in watched list 2", 1)
                continue

            for library_1 in watched_list_1[user_1]:
                library_other = None
                if library_mapping:
                    library_other = search_mapping(library_mapping, library_1)
                if library_1 in modified_watched_list_1[user_1]:
                    if library_1 in watched_list_2[user_2]:
                        library_2 = library_1
                    elif library_other in watched_list_2[user_2]:
                        library_2 = library_other
                    else:
                        logger(f"library {library_1} and {library_other} not found in watched list 2", 1)
                        continue

                    # Movies
                    if isinstance(watched_list_1[user_1][library_1], list):
                        _, _, movies_watched_list_2_keys_dict = generate_library_guids_dict(watched_list_2[user_2][library_2], 2)
                        for movie in watched_list_1[user_1][library_1]:
                            movie_found = False
                            for movie_key, movie_value in movie.items():
                                if movie_key == "locations":
                                    for location in movie_value:
                                        if location in movies_watched_list_2_keys_dict["locations"]:
                                            movie_found = True
                                            break
                                else:
                                    if movie_key in movies_watched_list_2_keys_dict.keys():
                                        if movie_value in movies_watched_list_2_keys_dict[movie_key]:
                                            movie_found = True

                                if movie_found:
                                    logger(f"Removing {movie} from {library_1}", 3)
                                    modified_watched_list_1[user_1][library_1].remove(movie)
                                    break

                    # TV Shows
                    elif isinstance(watched_list_1[user_1][library_1], dict):
                        # Generate full list of provider ids for episodes in watch_list_2 to easily compare if they exist in watch_list_1
                        show_watched_list_2_keys_dict, episode_watched_list_2_keys_dict, _ = generate_library_guids_dict(watched_list_2[user_2][library_2], 3)

                        for show_key_1 in watched_list_1[user_1][library_1].keys():
                            show_key_dict = dict(show_key_1)
                            for season in watched_list_1[user_1][library_1][show_key_1]:
                                for episode in watched_list_1[user_1][library_1][show_key_1][season]:
                                    episode_found = False
                                    for episode_key, episode_value in episode.items():
                                        # If episode_key and episode_value are in episode_watched_list_2_keys_dict exactly, then remove from watch_list_1
                                        if episode_key == "locations":
                                            for location in episode_value:
                                                if location in episode_watched_list_2_keys_dict["locations"]:
                                                    episode_found = True
                                                    break

                                        else:
                                            if episode_key in episode_watched_list_2_keys_dict.keys():
                                                if episode_value in episode_watched_list_2_keys_dict[episode_key]:
                                                    episode_found = True

                                        if episode_found:
                                            if episode in modified_watched_list_1[user_1][library_1][show_key_1][season]:
                                                logger(f"Removing {show_key_dict['title']} {episode} from {library_1}", 3)
                                                modified_watched_list_1[user_1][library_1][show_key_1][season].remove(episode)
                                            break

                                # Remove empty seasons
                                if len(modified_watched_list_1[user_1][library_1][show_key_1][season]) == 0:
                                    if season in modified_watched_list_1[user_1][library_1][show_key_1]:
                                        logger(f"Removing {season} from {library_1} because it is empty", 3)
                                        del modified_watched_list_1[user_1][library_1][show_key_1][season]

                            # If the show is empty, remove the show
                            if len(modified_watched_list_1[user_1][library_1][show_key_1]) == 0:
                                if show_key_1 in modified_watched_list_1[user_1][library_1]:
                                    logger(f"Removing {show_key_dict['title']} from {library_1} because it is empty", 1)
                                    del modified_watched_list_1[user_1][library_1][show_key_1]

    for user_1 in watched_list_1:
        for library_1 in watched_list_1[user_1]:
            if library_1 in modified_watched_list_1[user_1]:
                # If library is empty then remove it
                if len(modified_watched_list_1[user_1][library_1]) == 0:
                    logger(f"Removing {library_1} from {user_1} because it is empty", 1)
                    del modified_watched_list_1[user_1][library_1]

        if user_1 in modified_watched_list_1:
            # If user is empty delete user
            if len(modified_watched_list_1[user_1]) == 0:
                logger(f"Removing {user_1} from watched list 1 because it is empty", 1)
                del modified_watched_list_1[user_1]

    return modified_watched_list_1

def setup_black_white_lists(blacklist_library: str, whitelist_library: str, blacklist_library_type: str, whitelist_library_type: str, blacklist_users: str, whitelist_users: str, library_mapping=None, user_mapping=None):
    if blacklist_library:
        if len(blacklist_library) > 0:
            blacklist_library = blacklist_library.split(",")
            blacklist_library = [x.strip() for x in blacklist_library]
            if library_mapping:
                temp_library = []
                for library in blacklist_library:
                    library_other = search_mapping(library_mapping, library)
                    if library_other:
                        temp_library.append(library_other)

                blacklist_library = blacklist_library + temp_library
    else:
        blacklist_library = []
    logger(f"Blacklist Library: {blacklist_library}", 1)

    if whitelist_library:
        if len(whitelist_library) > 0:
            whitelist_library = whitelist_library.split(",")
            whitelist_library = [x.strip() for x in whitelist_library]
            if library_mapping:
                temp_library = []
                for library in whitelist_library:
                    library_other = search_mapping(library_mapping, library)
                    if library_other:
                        temp_library.append(library_other)

                whitelist_library = whitelist_library + temp_library
    else:
        whitelist_library = []
    logger(f"Whitelist Library: {whitelist_library}", 1)

    if blacklist_library_type:
        if len(blacklist_library_type) > 0:
            blacklist_library_type = blacklist_library_type.split(",")
            blacklist_library_type = [x.lower().strip() for x in blacklist_library_type]
    else:
        blacklist_library_type = []
    logger(f"Blacklist Library Type: {blacklist_library_type}", 1)

    if whitelist_library_type:
        if len(whitelist_library_type) > 0:
            whitelist_library_type = whitelist_library_type.split(",")
            whitelist_library_type = [x.lower().strip() for x in whitelist_library_type]
    else:
        whitelist_library_type = []
    logger(f"Whitelist Library Type: {whitelist_library_type}", 1)

    if blacklist_users:
        if len(blacklist_users) > 0:
            blacklist_users = blacklist_users.split(",")
            blacklist_users = [x.lower().strip() for x in blacklist_users]
            if user_mapping:
                temp_users = []
                for user in blacklist_users:
                    user_other = search_mapping(user_mapping, user)
                    if user_other:
                        temp_users.append(user_other)

                blacklist_users = blacklist_users + temp_users
    else:
        blacklist_users = []
    logger(f"Blacklist Users: {blacklist_users}", 1)

    if whitelist_users:
        if len(whitelist_users) > 0:
            whitelist_users = whitelist_users.split(",")
            whitelist_users = [x.lower().strip() for x in whitelist_users]
            if user_mapping:
                temp_users = []
                for user in whitelist_users:
                    user_other = search_mapping(user_mapping, user)
                    if user_other:
                        temp_users.append(user_other)

                whitelist_users = whitelist_users + temp_users
        else:
            whitelist_users = []
    else:
        whitelist_users = []
    logger(f"Whitelist Users: {whitelist_users}", 1)

    return blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, blacklist_users, whitelist_users

def setup_users(server_1, server_2, blacklist_users, whitelist_users, user_mapping=None):

    # generate list of users from server 1 and server 2
    server_1_type = server_1[0]
    server_1_connection = server_1[1]
    server_2_type = server_2[0]
    server_2_connection = server_2[1]

    server_1_users = []
    if server_1_type == "plex":
        server_1_users = [ x.title.lower() for x in server_1_connection.users ]
    elif server_1_type == "jellyfin":
        server_1_users = [ key.lower() for key in server_1_connection.users.keys() ]

    server_2_users = []
    if server_2_type == "plex":
        server_2_users = [ x.title.lower() for x in server_2_connection.users ]
    elif server_2_type == "jellyfin":
        server_2_users = [ key.lower() for key in server_2_connection.users.keys() ]

    # combined list of overlapping users from plex and jellyfin
    users = {}

    for server_1_user in server_1_users:
        if user_mapping:
            jellyfin_plex_mapped_user = search_mapping(user_mapping, server_1_user)
            if jellyfin_plex_mapped_user:
                users[server_1_user] = jellyfin_plex_mapped_user
                continue

        if server_1_user in server_2_users:
            users[server_1_user] = server_1_user

    for server_2_user in server_2_users:
        if user_mapping:
            plex_jellyfin_mapped_user = search_mapping(user_mapping, server_2_user)
            if plex_jellyfin_mapped_user:
                users[plex_jellyfin_mapped_user] = server_2_user
                continue

        if server_2_user in server_1_users:
            users[server_2_user] = server_2_user

    logger(f"User list that exist on both servers {users}", 1)

    users_filtered = {}
    for user in users:
        # whitelist_user is not empty and user lowercase is not in whitelist lowercase
        if len(whitelist_users) > 0:
            if user not in whitelist_users and users[user] not in whitelist_users:
                logger(f"{user} or {users[user]} is not in whitelist", 1)
                continue

        if user not in blacklist_users and users[user] not in blacklist_users:
            users_filtered[user] = users[user]

    logger(f"Filtered user list {users_filtered}", 1)

    if server_1_type == "plex":
        output_server_1_users = []
        for plex_user in server_1_connection.users:
            if plex_user.title.lower() in users_filtered.keys() or plex_user.title.lower() in users_filtered.values():
                output_server_1_users.append(plex_user)
    elif server_1_type == "jellyfin":
        output_server_1_users = {}
        for jellyfin_user, jellyfin_id in server_1_connection.users.items():
            if jellyfin_user.lower() in users_filtered.keys() or jellyfin_user.lower() in users_filtered.values():
                output_server_1_users[jellyfin_user] = jellyfin_id

    if server_2_type == "plex":
        output_server_2_users = []
        for plex_user in server_2_connection.users:
            if plex_user.title.lower() in users_filtered.keys() or plex_user.title.lower() in users_filtered.values():
                output_server_2_users.append(plex_user)
    elif server_2_type == "jellyfin":
        output_server_2_users = {}
        for jellyfin_user, jellyfin_id in server_2_connection.users.items():
            if jellyfin_user.lower() in users_filtered.keys() or jellyfin_user.lower() in users_filtered.values():
                output_server_2_users[jellyfin_user] = jellyfin_id

    if len(output_server_1_users) == 0:
        raise Exception(f"No users found for server 1, users found {users} filtered users {users_filtered}")

    if len(output_server_2_users) == 0:
        raise Exception(f"No users found for server 2, users found {users} filtered users {users_filtered}")

    logger(f"Server 1 users: {output_server_1_users}", 1)
    logger(f"Server 2 users: {output_server_2_users}", 1)

    return output_server_1_users, output_server_2_users

def generate_server_connections():
    servers = []

    plex_baseurl = os.getenv("PLEX_BASEURL", None)
    plex_token = os.getenv("PLEX_TOKEN", None)
    plex_username = os.getenv("PLEX_USERNAME", None)
    plex_password = os.getenv("PLEX_PASSWORD", None)
    plex_servername = os.getenv("PLEX_SERVERNAME", None)

    if plex_baseurl and plex_token:
        plex_baseurl = plex_baseurl.split(",")
        plex_token = plex_token.split(",")

        if len(plex_baseurl) != len(plex_token):
            raise Exception("PLEX_BASEURL and PLEX_TOKEN must have the same number of entries")

        for i, url in enumerate(plex_baseurl):
            servers.append(("plex", Plex(baseurl=url.strip(), token=plex_token[i].strip(), username=None, password=None, servername=None)))

    if plex_username and plex_password and plex_servername:
        plex_username = plex_username.split(",")
        plex_password = plex_password.split(",")
        plex_servername = plex_servername.split(",")

        if len(plex_username) != len(plex_password) or len(plex_username) != len(plex_servername):
            raise Exception("PLEX_USERNAME, PLEX_PASSWORD and PLEX_SERVERNAME must have the same number of entries")

        for i, username in enumerate(plex_username):
            servers.append(("plex", Plex(baseurl=None, token=None, username=username.strip(), password=plex_password[i].strip(), servername=plex_servername[i].strip())))

    jellyfin_baseurl = os.getenv("JELLYFIN_BASEURL", None)
    jellyfin_token = os.getenv("JELLYFIN_TOKEN", None)

    if jellyfin_baseurl and jellyfin_token:
        jellyfin_baseurl = jellyfin_baseurl.split(",")
        jellyfin_token = jellyfin_token.split(",")

        if len(jellyfin_baseurl) != len(jellyfin_token):
            raise Exception("JELLYFIN_BASEURL and JELLYFIN_TOKEN must have the same number of entries")

        for i, baseurl in enumerate(jellyfin_baseurl):
            servers.append(("jellyfin", Jellyfin(baseurl=baseurl.strip(), token=jellyfin_token[i].strip())))

    return servers

def main_loop():
    logfile = os.getenv("LOGFILE", "log.log")
    # Delete logfile if it exists
    if os.path.exists(logfile):
        os.remove(logfile)

    dryrun = str_to_bool(os.getenv("DRYRUN", "False"))
    logger(f"Dryrun: {dryrun}", 1)

    user_mapping = os.getenv("USER_MAPPING")
    if user_mapping:
        user_mapping = json.loads(user_mapping.lower())
        logger(f"User Mapping: {user_mapping}", 1)

    library_mapping = os.getenv("LIBRARY_MAPPING")
    if library_mapping:
        library_mapping = json.loads(library_mapping)
        logger(f"Library Mapping: {library_mapping}", 1)

    # Create (black/white)lists
    logger("Creating (black/white)lists", 1)
    blacklist_library = os.getenv("BLACKLIST_LIBRARY", None)
    whitelist_library = os.getenv("WHITELIST_LIBRARY", None)
    blacklist_library_type = os.getenv("BLACKLIST_LIBRARY_TYPE", None)
    whitelist_library_type = os.getenv("WHITELIST_LIBRARY_TYPE", None)
    blacklist_users = os.getenv("BLACKLIST_USERS", None)
    whitelist_users = os.getenv("WHITELIST_USERS", None)

    blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, blacklist_users, whitelist_users = setup_black_white_lists(blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, blacklist_users, whitelist_users, library_mapping, user_mapping)

    # Create server connections
    logger("Creating server connections", 1)
    servers = generate_server_connections()

    for server_1 in servers:
        # If server is the final server in the list, then we are done with the loop
        if server_1 == servers[-1]:
            break

        # Start server_2 at the next server in the list
        for server_2 in servers[servers.index(server_1) + 1:]:

            server_1_connection = server_1[1]
            server_2_connection = server_2[1]

            # Create users list
            logger("Creating users list", 1)
            server_1_users, server_2_users = setup_users(server_1, server_2, blacklist_users, whitelist_users, user_mapping)

            logger("Creating watched lists", 1)
            args = [
                [server_1_connection.get_watched, server_1_users, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping],
                [server_2_connection.get_watched, server_2_users, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping],
            ]

            results = future_thread_executor(args)
            server_1_watched = results[0]
            server_2_watched = results[1]
            logger(f"Server 1 watched: {server_1_watched}", 3)
            logger(f"Server 2 watched: {server_2_watched}", 3)

            # clone watched so it isnt modified in the cleanup function so all duplicates are actually removed
            server_1_watched_filtered = copy.deepcopy(server_1_watched)
            server_2_watched_filtered = copy.deepcopy(server_2_watched)

            logger("Cleaning Server 1 Watched", 1)
            server_1_watched_filtered = cleanup_watched(server_1_watched, server_2_watched, user_mapping, library_mapping)

            logger("Cleaning Server 2 Watched", 1)
            server_2_watched_filtered = cleanup_watched(server_2_watched, server_1_watched, user_mapping, library_mapping)

            logger(f"server 1 watched that needs to be synced to server 2:\n{server_1_watched_filtered}", 1)
            logger(f"server 2 watched that needs to be synced to server 1:\n{server_2_watched_filtered}", 1)

            args = [
                [server_1_connection.update_watched, server_2_watched_filtered, user_mapping, library_mapping, dryrun],
                [server_2_connection.update_watched, server_1_watched_filtered, user_mapping, library_mapping, dryrun],
            ]

            future_thread_executor(args)

def main():
    sleep_duration = float(os.getenv("SLEEP_DURATION", "3600"))

    while(True):
        try:
            main_loop()
            logger(f"Looping in {sleep_duration}")
            sleep(sleep_duration)
        except Exception as error:
            if isinstance(error, list):
                for message in error:
                    logger(message, log_type=2)
            else:
                logger(error, log_type=2)

            logger(traceback.format_exc(), 2)
            logger(f"Retrying in {sleep_duration}", log_type=0)
            sleep(sleep_duration)

        except KeyboardInterrupt:
            logger("Exiting", log_type=0)
            os._exit(0)
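The removed entry point above fans work out through future_thread_executor, passing a list of [callable, arg, ...] entries and indexing the results in submission order. The helper itself lives in src/functions.py, which this compare does not show, so the following is only a plausible sketch matching the call sites above, not the project's actual implementation:

from concurrent.futures import ThreadPoolExecutor


def future_thread_executor(args: list[list]) -> list:
    # Each entry is [callable, arg1, arg2, ...]; results come back in
    # submission order, matching how main_loop reads results[0]/results[1].
    with ThreadPoolExecutor(max_workers=len(args)) as executor:
        futures = [executor.submit(entry[0], *entry[1:]) for entry in args]
        return [future.result() for future in futures]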
import os
import traceback
import json
import sys
from dotenv import dotenv_values
from time import sleep, perf_counter
from loguru import logger

from src.emby import Emby
from src.jellyfin import Jellyfin
from src.plex import Plex
from src.library import setup_libraries
from src.functions import (
    parse_string_to_list,
    str_to_bool,
    get_env_value,
)
from src.users import setup_users
from src.watched import (
    cleanup_watched,
    merge_server_watched,
)
from src.black_white import setup_black_white_lists
from src.connection import generate_server_connections


def configure_logger(log_file: str = "log.log", debug_level: str = "INFO") -> None:
    # Remove default logger to configure our own
    logger.remove()

    # Choose log level based on environment
    # If in debug mode with a "debug" level, use DEBUG; otherwise, default to INFO.

    if debug_level not in ["INFO", "DEBUG", "TRACE"]:
        logger.add(sys.stdout)
        raise Exception(
            f"Invalid DEBUG_LEVEL {debug_level}, please choose between INFO, DEBUG, TRACE"
        )

    # Add a sink for file logging and the console.
    logger.add(log_file, level=debug_level, mode="w")
    logger.add(sys.stdout, level=debug_level)


def should_sync_server(
    env,
    server_1: Plex | Jellyfin | Emby,
    server_2: Plex | Jellyfin | Emby,
) -> bool:
    sync_from_plex_to_jellyfin = str_to_bool(
        get_env_value(env, "SYNC_FROM_PLEX_TO_JELLYFIN", "True")
    )
    sync_from_plex_to_plex = str_to_bool(
        get_env_value(env, "SYNC_FROM_PLEX_TO_PLEX", "True")
    )
    sync_from_plex_to_emby = str_to_bool(
        get_env_value(env, "SYNC_FROM_PLEX_TO_EMBY", "True")
    )

    sync_from_jelly_to_plex = str_to_bool(
        get_env_value(env, "SYNC_FROM_JELLYFIN_TO_PLEX", "True")
    )
    sync_from_jelly_to_jellyfin = str_to_bool(
        get_env_value(env, "SYNC_FROM_JELLYFIN_TO_JELLYFIN", "True")
    )
    sync_from_jelly_to_emby = str_to_bool(
        get_env_value(env, "SYNC_FROM_JELLYFIN_TO_EMBY", "True")
    )

    sync_from_emby_to_plex = str_to_bool(
        get_env_value(env, "SYNC_FROM_EMBY_TO_PLEX", "True")
    )
    sync_from_emby_to_jellyfin = str_to_bool(
        get_env_value(env, "SYNC_FROM_EMBY_TO_JELLYFIN", "True")
    )
    sync_from_emby_to_emby = str_to_bool(
        get_env_value(env, "SYNC_FROM_EMBY_TO_EMBY", "True")
    )

    if isinstance(server_1, Plex):
        if isinstance(server_2, Jellyfin) and not sync_from_plex_to_jellyfin:
            logger.info("Sync from plex -> jellyfin is disabled")
            return False

        if isinstance(server_2, Emby) and not sync_from_plex_to_emby:
            logger.info("Sync from plex -> emby is disabled")
            return False

        if isinstance(server_2, Plex) and not sync_from_plex_to_plex:
            logger.info("Sync from plex -> plex is disabled")
            return False

    if isinstance(server_1, Jellyfin):
        if isinstance(server_2, Plex) and not sync_from_jelly_to_plex:
            logger.info("Sync from jellyfin -> plex is disabled")
            return False

        if isinstance(server_2, Jellyfin) and not sync_from_jelly_to_jellyfin:
            logger.info("Sync from jellyfin -> jellyfin is disabled")
            return False

        if isinstance(server_2, Emby) and not sync_from_jelly_to_emby:
            logger.info("Sync from jellyfin -> emby is disabled")
            return False

    if isinstance(server_1, Emby):
        if isinstance(server_2, Plex) and not sync_from_emby_to_plex:
            logger.info("Sync from emby -> plex is disabled")
            return False

        if isinstance(server_2, Jellyfin) and not sync_from_emby_to_jellyfin:
            logger.info("Sync from emby -> jellyfin is disabled")
            return False

        if isinstance(server_2, Emby) and not sync_from_emby_to_emby:
            logger.info("Sync from emby -> emby is disabled")
            return False

    return True


def main_loop(env) -> None:
    dryrun = str_to_bool(get_env_value(env, "DRYRUN", "False"))
    logger.info(f"Dryrun: {dryrun}")

    user_mapping_env = get_env_value(env, "USER_MAPPING", None)
    user_mapping = None
    if user_mapping_env:
        user_mapping = json.loads(user_mapping_env.lower())
        logger.info(f"User Mapping: {user_mapping}")

    library_mapping_env = get_env_value(env, "LIBRARY_MAPPING", None)
    library_mapping = None
    if library_mapping_env:
        library_mapping = json.loads(library_mapping_env)
        logger.info(f"Library Mapping: {library_mapping}")

    # Create (black/white)lists
    logger.info("Creating (black/white)lists")
    blacklist_library = parse_string_to_list(
        get_env_value(env, "BLACKLIST_LIBRARY", None)
    )
    whitelist_library = parse_string_to_list(
        get_env_value(env, "WHITELIST_LIBRARY", None)
    )
    blacklist_library_type = parse_string_to_list(
        get_env_value(env, "BLACKLIST_LIBRARY_TYPE", None)
    )
    whitelist_library_type = parse_string_to_list(
        get_env_value(env, "WHITELIST_LIBRARY_TYPE", None)
    )
    blacklist_users = parse_string_to_list(get_env_value(env, "BLACKLIST_USERS", None))
    whitelist_users = parse_string_to_list(get_env_value(env, "WHITELIST_USERS", None))

    (
        blacklist_library,
        whitelist_library,
        blacklist_library_type,
        whitelist_library_type,
        blacklist_users,
        whitelist_users,
    ) = setup_black_white_lists(
        blacklist_library,
        whitelist_library,
        blacklist_library_type,
        whitelist_library_type,
        blacklist_users,
        whitelist_users,
        library_mapping,
        user_mapping,
    )

    # Create server connections
    logger.info("Creating server connections")
    servers = generate_server_connections(env)

    for server_1 in servers:
        # If server is the final server in the list, then we are done with the loop
        if server_1 == servers[-1]:
            break

        # Store a copy of server_1_watched so it can be reused without regathering everyone's watch history every single time
        server_1_watched = None

        # Start server_2 at the next server in the list
        for server_2 in servers[servers.index(server_1) + 1 :]:
            # Check if server 1 and server 2 are going to be synced in either direction, skip if not
            if not should_sync_server(
                env, server_1, server_2
            ) and not should_sync_server(env, server_2, server_1):
                continue

            logger.info(f"Server 1: {type(server_1)}: {server_1.info()}")
            logger.info(f"Server 2: {type(server_2)}: {server_2.info()}")

            # Create users list
            logger.info("Creating users list")
            server_1_users, server_2_users = setup_users(
                server_1, server_2, blacklist_users, whitelist_users, user_mapping
            )

            server_1_libraries, server_2_libraries = setup_libraries(
                server_1,
                server_2,
                blacklist_library,
                blacklist_library_type,
                whitelist_library,
                whitelist_library_type,
                library_mapping,
            )
            logger.info(f"Server 1 syncing libraries: {server_1_libraries}")
            logger.info(f"Server 2 syncing libraries: {server_2_libraries}")

            logger.info("Creating watched lists")
            server_1_watched = server_1.get_watched(
                server_1_users, server_1_libraries, server_1_watched
            )
            logger.info("Finished creating watched list server 1")

            server_2_watched = server_2.get_watched(server_2_users, server_2_libraries)
            logger.info("Finished creating watched list server 2")

            logger.trace(f"Server 1 watched: {server_1_watched}")
            logger.trace(f"Server 2 watched: {server_2_watched}")

            logger.info("Cleaning Server 1 Watched")
            server_1_watched_filtered = cleanup_watched(
                server_1_watched, server_2_watched, user_mapping, library_mapping
            )

            logger.info("Cleaning Server 2 Watched")
            server_2_watched_filtered = cleanup_watched(
                server_2_watched, server_1_watched, user_mapping, library_mapping
            )

            logger.debug(
                f"server 1 watched that needs to be synced to server 2:\n{server_1_watched_filtered}",
            )
            logger.debug(
                f"server 2 watched that needs to be synced to server 1:\n{server_2_watched_filtered}",
            )

            if should_sync_server(env, server_2, server_1):
                logger.info(f"Syncing {server_2.info()} -> {server_1.info()}")

                # Add server_2_watched_filtered to server_1_watched so the stored version isn't stale for the next server
                if not dryrun:
                    server_1_watched = merge_server_watched(
                        server_1_watched,
                        server_2_watched_filtered,
                        user_mapping,
                        library_mapping,
                    )

                server_1.update_watched(
                    server_2_watched_filtered,
                    user_mapping,
                    library_mapping,
                    dryrun,
                )

            if should_sync_server(env, server_1, server_2):
                logger.info(f"Syncing {server_1.info()} -> {server_2.info()}")
                server_2.update_watched(
                    server_1_watched_filtered,
                    user_mapping,
                    library_mapping,
                    dryrun,
                )


@logger.catch
def main() -> None:
    # Get environment variables
    env_file = get_env_value(None, "ENV_FILE", ".env")
    env = dotenv_values(env_file)

    run_only_once = str_to_bool(get_env_value(env, "RUN_ONLY_ONCE", "False"))
    sleep_duration = float(get_env_value(env, "SLEEP_DURATION", "3600"))
    log_file = get_env_value(env, "LOG_FILE", "log.log")
    debug_level = get_env_value(env, "DEBUG_LEVEL", "INFO")
    if debug_level:
        debug_level = debug_level.upper()

    times: list[float] = []
    while True:
        try:
            start = perf_counter()
            # Reconfigure the logger on each loop so the logs are rotated on each run
            configure_logger(log_file, debug_level)
            main_loop(env)
            end = perf_counter()
            times.append(end - start)

            if len(times) > 0:
                logger.info(f"Average time: {sum(times) / len(times)}")

            if run_only_once:
                break

            logger.info(f"Looping in {sleep_duration}")
            sleep(sleep_duration)

        except Exception as error:
            if isinstance(error, list):
                for message in error:
                    logger.error(message)
            else:
                logger.error(error)

            logger.error(traceback.format_exc())

            if run_only_once:
                break

            logger.info(f"Retrying in {sleep_duration}")
            sleep(sleep_duration)

        except KeyboardInterrupt:
            if len(times) > 0:
                logger.info(f"Average time: {sum(times) / len(times)}")
            logger.info("Exiting")
            os._exit(0)
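The new entry point above leans on two small helpers from src.functions whose behavior can be read off the call sites: str_to_bool turns env-file strings like "True" into booleans, and parse_string_to_list splits comma-separated env values. Neither implementation appears in this compare, so the sketch below is an assumption of the expected behavior, not the project's actual code:

def str_to_bool(value: str) -> bool:
    # "True", "true", "1", "yes" -> True; anything else -> False (assumed).
    return value.strip().lower() in ("true", "1", "yes", "on")


def parse_string_to_list(value: str | None) -> list[str]:
    # "Movies, Anime ,TV" -> ["Movies", "Anime", "TV"]; None/"" -> [] (assumed).
    if not value:
        return []
    return [item.strip() for item in value.split(",") if item.strip()]


assert str_to_bool("True") and not str_to_bool("False")
assert parse_string_to_list("Movies, Anime") == ["Movies", "Anime"]
assert parse_string_to_list(None) == []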

src/plex.py (927 changes)
@@ -1,305 +1,622 @@
import re

from plexapi.server import PlexServer
from plexapi.myplex import MyPlexAccount

from src.functions import logger, search_mapping, check_skip_logic, generate_library_guids_dict, future_thread_executor


# class plex accept base url and token and username and password but default with none
class Plex:
    def __init__(self, baseurl=None, token=None, username=None, password=None, servername=None):
        self.baseurl = baseurl
        self.token = token
        self.username = username
        self.password = password
        self.servername = servername
        self.plex = self.login()
        self.admin_user = self.plex.myPlexAccount()
        self.users = self.get_users()

    def login(self):
        try:
            if self.baseurl and self.token:
                # Login via token
                plex = PlexServer(self.baseurl, self.token)
            elif self.username and self.password and self.servername:
                # Login via plex account
                account = MyPlexAccount(self.username, self.password)
                plex = account.resource(self.servername).connect()
            else:
                raise Exception("No complete plex credentials provided")

            return plex
        except Exception as e:
            if self.username or self.password:
                msg = f"Failed to login via plex account {self.username}"
                logger(f"Plex: Failed to login, {msg}, Error: {e}", 2)
            else:
                logger(f"Plex: Failed to login, Error: {e}", 2)
            raise Exception(e)

    def get_users(self):
        try:
            users = self.plex.myPlexAccount().users()

            # append self to users
            users.append(self.plex.myPlexAccount())

            return users
        except Exception as e:
            logger(f"Plex: Failed to get users, Error: {e}", 2)
            raise Exception(e)

    def get_user_watched(self, user, user_plex, library):
        try:
            user_name = user.title.lower()
            user_watched = {}
            user_watched[user_name] = {}

            logger(f"Plex: Generating watched for {user_name} in library {library.title}", 0)

            if library.type == "movie":
                user_watched[user_name][library.title] = []

                library_videos = user_plex.library.section(library.title)
                for video in library_videos.search(unwatched=False):
                    movie_guids = {}
                    for guid in video.guids:
                        guid_source = re.search(r'(.*)://', guid.id).group(1).lower()
                        guid_id = re.search(r'://(.*)', guid.id).group(1)
                        movie_guids[guid_source] = guid_id

                    movie_guids["title"] = video.title
                    movie_guids["locations"] = tuple([x.split("/")[-1] for x in video.locations])

                    user_watched[user_name][library.title].append(movie_guids)

            elif library.type == "show":
                user_watched[user_name][library.title] = {}

                library_videos = user_plex.library.section(library.title)
                for show in library_videos.search(unwatched=False):
                    show_guids = {}
                    for show_guid in show.guids:
                        # Extract after :// from guid.id
                        show_guid_source = re.search(r'(.*)://', show_guid.id).group(1).lower()
                        show_guid_id = re.search(r'://(.*)', show_guid.id).group(1)
                        show_guids[show_guid_source] = show_guid_id

                    show_guids["title"] = show.title
                    show_guids["locations"] = tuple([x.split("/")[-1] for x in show.locations])
                    show_guids = frozenset(show_guids.items())

                    for season in show.seasons():
                        episode_guids = []
                        for episode in season.episodes():
                            if episode.viewCount > 0:
                                episode_guids_temp = {}
                                for guid in episode.guids:
                                    # Extract after :// from guid.id
                                    guid_source = re.search(r'(.*)://', guid.id).group(1).lower()
                                    guid_id = re.search(r'://(.*)', guid.id).group(1)
                                    episode_guids_temp[guid_source] = guid_id

                                episode_guids_temp["locations"] = tuple([x.split("/")[-1] for x in episode.locations])
                                episode_guids.append(episode_guids_temp)

                        if episode_guids:
                            # append show, season, episode
                            if show_guids not in user_watched[user_name][library.title]:
                                user_watched[user_name][library.title][show_guids] = {}
                            if season.title not in user_watched[user_name][library.title][show_guids]:
                                user_watched[user_name][library.title][show_guids][season.title] = {}
                            user_watched[user_name][library.title][show_guids][season.title] = episode_guids

            return user_watched
        except Exception as e:
            logger(f"Plex: Failed to get watched for {user_name} in library {library.title}, Error: {e}", 2)
            raise Exception(e)

    def get_watched(self, users, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping):
        try:
            # Get all libraries
            users_watched = {}
            args = []

            for user in users:
                if self.admin_user == user:
                    user_plex = self.plex
                else:
                    user_plex = PlexServer(self.plex._baseurl, user.get_token(self.plex.machineIdentifier))

                libraries = user_plex.library.sections()

                for library in libraries:
                    library_title = library.title
                    library_type = library.type

                    skip_reason = check_skip_logic(library_title, library_type, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping)

                    if skip_reason:
                        logger(f"Plex: Skipping library {library_title} {skip_reason}", 1)
                        continue

                    args.append([self.get_user_watched, user, user_plex, library])

            for user_watched in future_thread_executor(args):
                for user, user_watched_temp in user_watched.items():
                    if user not in users_watched:
                        users_watched[user] = {}
                    users_watched[user].update(user_watched_temp)

            return users_watched
        except Exception as e:
            logger(f"Plex: Failed to get watched, Error: {e}", 2)
            raise Exception(e)

    def update_user_watched(self, user, user_plex, library, videos, dryrun):
        try:
            logger(f"Plex: Updating watched for {user.title} in library {library}", 1)
            library_videos = user_plex.library.section(library)

            if library_videos.type == "movie":
                _, _, videos_movies_ids = generate_library_guids_dict(videos, 2)
                for movies_search in library_videos.search(unwatched=True):
                    movie_found = False
                    for movie_location in movies_search.locations:
                        if movie_location.split("/")[-1] in videos_movies_ids["locations"]:
                            movie_found = True
                            break

                    if not movie_found:
                        for movie_guid in movies_search.guids:
                            movie_guid_source = re.search(r'(.*)://', movie_guid.id).group(1).lower()
                            movie_guid_id = re.search(r'://(.*)', movie_guid.id).group(1)

                            # If movie provider source and movie provider id are in videos_movie_ids exactly, then the movie is in the list
                            if movie_guid_source in videos_movies_ids.keys():
                                if movie_guid_id in videos_movies_ids[movie_guid_source]:
                                    movie_found = True
                                    break

                    if movie_found:
                        if movies_search.viewCount == 0:
                            msg = f"{movies_search.title} as watched for {user.title} in {library} for Plex"
                            if not dryrun:
                                logger(f"Marked {msg}", 0)
                                movies_search.markWatched()
                            else:
                                logger(f"Dryrun {msg}", 0)

            elif library_videos.type == "show":
                videos_shows_ids, videos_episode_ids, _ = generate_library_guids_dict(videos, 3)

                for show_search in library_videos.search(unwatched=True):
                    show_found = False
                    for show_location in show_search.locations:
                        if show_location.split("/")[-1] in videos_shows_ids["locations"]:
                            show_found = True
                            break

                    if not show_found:
                        for show_guid in show_search.guids:
                            show_guid_source = re.search(r'(.*)://', show_guid.id).group(1).lower()
                            show_guid_id = re.search(r'://(.*)', show_guid.id).group(1)

                            # If show provider source and show provider id are in videos_shows_ids exactly, then the show is in the list
                            if show_guid_source in videos_shows_ids.keys():
                                if show_guid_id in videos_shows_ids[show_guid_source]:
                                    show_found = True
                                    break

                    if show_found:
                        for episode_search in show_search.episodes():
                            episode_found = False

                            for episode_location in episode_search.locations:
                                if episode_location.split("/")[-1] in videos_episode_ids["locations"]:
                                    episode_found = True
                                    break

                            if not episode_found:
                                for episode_guid in episode_search.guids:
                                    episode_guid_source = re.search(r'(.*)://', episode_guid.id).group(1).lower()
                                    episode_guid_id = re.search(r'://(.*)', episode_guid.id).group(1)

                                    # If episode provider source and episode provider id are in videos_episode_ids exactly, then the episode is in the list
                                    if episode_guid_source in videos_episode_ids.keys():
                                        if episode_guid_id in videos_episode_ids[episode_guid_source]:
                                            episode_found = True
                                            break

                            if episode_found:
                                if episode_search.viewCount == 0:
                                    msg = f"{show_search.title} {episode_search.title} as watched for {user.title} in {library} for Plex"
                                    if not dryrun:
                                        logger(f"Marked {msg}", 0)
                                        episode_search.markWatched()
                                    else:
                                        logger(f"Dryrun {msg}", 0)
        except Exception as e:
            logger(f"Plex: Failed to update watched for {user.title} in library {library}, Error: {e}", 2)
            raise Exception(e)

    def update_watched(self, watched_list, user_mapping=None, library_mapping=None, dryrun=False):
        try:
            args = []

            for user, libraries in watched_list.items():
                user_other = None
                # If type of user is dict
                if user_mapping:
                    if user in user_mapping.keys():
                        user_other = user_mapping[user]
                    elif user in user_mapping.values():
                        user_other = search_mapping(user_mapping, user)

                for index, value in enumerate(self.users):
                    if user.lower() == value.title.lower():
                        user = self.users[index]
                        break
                    elif user_other and user_other.lower() == value.title.lower():
                        user = self.users[index]
                        break

                if self.admin_user == user:
                    user_plex = self.plex
                else:
                    user_plex = PlexServer(self.plex._baseurl, user.get_token(self.plex.machineIdentifier))

                for library, videos in libraries.items():
                    library_other = None
                    if library_mapping:
                        if library in library_mapping.keys():
                            library_other = library_mapping[library]
                        elif library in library_mapping.values():
                            library_other = search_mapping(library_mapping, library)

                    # if library in plex library list
                    library_list = user_plex.library.sections()
                    if library.lower() not in [x.title.lower() for x in library_list]:
                        if library_other:
                            if library_other.lower() in [x.title.lower() for x in library_list]:
                                logger(f"Plex: Library {library} not found, but {library_other} found, using {library_other}", 1)
                                library = library_other
                            else:
                                logger(f"Plex: Library {library} or {library_other} not found in library list", 2)
                                continue
                        else:
                            logger(f"Plex: Library {library} not found in library list", 2)
                            continue

                    args.append([self.update_user_watched, user, user_plex, library, videos, dryrun])

            future_thread_executor(args)
        except Exception as e:
            logger(f"Plex: Failed to update watched, Error: {e}", 2)
            raise Exception(e)
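The rewritten module below drops the old two-regex guid parsing in favor of a single split("://") per guid inside extract_guids_from_item. For well-formed agent ids the two approaches agree; a small standalone check, using made-up guid strings of the shape Plex returns:

import re

guid_ids = ["imdb://tt1234567", "tmdb://603", "tvdb://290434"]

# Old approach: two regex passes per guid, lowercasing the source.
old = {
    re.search(r"(.*)://", g).group(1).lower(): re.search(r"://(.*)", g).group(1)
    for g in guid_ids
}

# New approach: one split per guid, as in extract_guids_from_item below.
new = dict(g.split("://") for g in guid_ids)

assert old == new == {"imdb": "tt1234567", "tmdb": "603", "tvdb": "290434"}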
from datetime import datetime, timezone
|
||||
import requests
|
||||
from loguru import logger
|
||||
|
||||
from urllib3.poolmanager import PoolManager
|
||||
from math import floor
|
||||
|
||||
from requests.adapters import HTTPAdapter as RequestsHTTPAdapter
|
||||
|
||||
from plexapi.video import Show, Episode, Movie
|
||||
from plexapi.server import PlexServer
|
||||
from plexapi.myplex import MyPlexAccount, MyPlexUser
|
||||
from plexapi.library import MovieSection, ShowSection
|
||||
|
||||
from src.functions import (
|
||||
filename_from_any_path,
|
||||
search_mapping,
|
||||
log_marked,
|
||||
str_to_bool,
|
||||
get_env_value,
|
||||
)
|
||||
from src.watched import (
|
||||
LibraryData,
|
||||
MediaIdentifiers,
|
||||
MediaItem,
|
||||
WatchedStatus,
|
||||
Series,
|
||||
UserData,
|
||||
check_same_identifiers,
|
||||
)
|
||||
|
||||
|
||||
# Bypass hostname validation for ssl. Taken from https://github.com/pkkid/python-plexapi/issues/143#issuecomment-775485186
|
||||
class HostNameIgnoringAdapter(RequestsHTTPAdapter):
|
||||
def init_poolmanager(
|
||||
self, connections: int, maxsize: int | None, block=..., **pool_kwargs
|
||||
) -> None:
|
||||
self.poolmanager = PoolManager(
|
||||
num_pools=connections,
|
||||
maxsize=maxsize,
|
||||
block=block,
|
||||
assert_hostname=False,
|
||||
**pool_kwargs,
|
||||
)
|
||||
|
||||
|
||||
def extract_guids_from_item(
|
||||
item: Movie | Show | Episode, generate_guids: bool
|
||||
) -> dict[str, str]:
|
||||
# If GENERATE_GUIDS is set to False, then return an empty dict
|
||||
if not generate_guids:
|
||||
return {}
|
||||
|
||||
guids: dict[str, str] = dict(
|
||||
guid.id.split("://")
|
||||
for guid in item.guids
|
||||
if guid.id and len(guid.id.strip()) > 0
|
||||
)
|
||||
|
||||
return guids
|
||||
|
||||
|
||||
def extract_identifiers_from_item(
    item: Movie | Show | Episode,
    generate_guids: bool,
    generate_locations: bool,
) -> MediaIdentifiers:
    guids = extract_guids_from_item(item, generate_guids)
    locations = (
        tuple([filename_from_any_path(loc) for loc in item.locations])
        if generate_locations
        else tuple()
    )

    if generate_guids:
        if not guids:
            logger.debug(
                f"Plex: {item.title} has no guids{f', locations: {" ".join(item.locations)}' if generate_locations else ''}",
            )

    if generate_locations:
        if not locations:
            logger.debug(
                f"Plex: {item.title} has no locations{f', guids: {guids}' if generate_guids else ''}",
            )

    return MediaIdentifiers(
        title=item.title,
        locations=locations,
        imdb_id=guids.get("imdb"),
        tvdb_id=guids.get("tvdb"),
        tmdb_id=guids.get("tmdb"),
    )


def get_mediaitem(
    item: Movie | Episode,
    completed: bool,
    generate_guids: bool = True,
    generate_locations: bool = True,
) -> MediaItem:
    last_viewed_at = item.lastViewedAt
    viewed_date = datetime.today()

    if last_viewed_at:
        viewed_date = last_viewed_at.replace(tzinfo=timezone.utc)

    return MediaItem(
        identifiers=extract_identifiers_from_item(
            item, generate_guids, generate_locations
        ),
        status=WatchedStatus(
            completed=completed, time=item.viewOffset, viewed_date=viewed_date
        ),
    )


# The Plex class accepts either a base url and token, or a username, password
# and server name, all defaulting to None
class Plex:
    def __init__(
        self,
        env,
        base_url: str | None = None,
        token: str | None = None,
        user_name: str | None = None,
        password: str | None = None,
        server_name: str | None = None,
        ssl_bypass: bool = False,
        session: requests.Session | None = None,
    ) -> None:
        self.env = env

        self.server_type: str = "Plex"
        self.ssl_bypass: bool = ssl_bypass
        if ssl_bypass:
            # Session for ssl bypass
            session = requests.Session()
            # By pass ssl hostname check https://github.com/pkkid/python-plexapi/issues/143#issuecomment-775485186
            session.mount("https://", HostNameIgnoringAdapter())
        self.session = session
        self.plex: PlexServer = self.login(
            base_url, token, user_name, password, server_name
        )

        self.base_url: str = self.plex._baseurl

        self.admin_user: MyPlexAccount = self.plex.myPlexAccount()
        self.users: list[MyPlexUser | MyPlexAccount] = self.get_users()
        self.generate_guids: bool = str_to_bool(
            get_env_value(self.env, "GENERATE_GUIDS", "True")
        )
        self.generate_locations: bool = str_to_bool(
            get_env_value(self.env, "GENERATE_LOCATIONS", "True")
        )

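    # Usage sketch (values are placeholders): connect with a direct token and
    # bypass ssl hostname verification:
    #   server = Plex(env, base_url="http://localhost:32400", token="<PLEX_TOKEN>", ssl_bypass=True)
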
    def login(
        self,
        base_url: str | None,
        token: str | None,
        user_name: str | None,
        password: str | None,
        server_name: str | None,
    ) -> PlexServer:
        try:
            if base_url and token:
                plex: PlexServer = PlexServer(base_url, token, session=self.session)
            elif user_name and password and server_name:
                # Login via plex account
                account = MyPlexAccount(user_name, password)
                plex = account.resource(server_name).connect()
            else:
                raise Exception("No complete plex credentials provided")

            return plex
        except Exception as e:
            if user_name:
                msg = f"Failed to login via plex account {user_name}"
                logger.error(f"Plex: Failed to login, {msg}, Error: {e}")
            else:
                logger.error(f"Plex: Failed to login, Error: {e}")
            raise Exception(e)

    def info(self) -> str:
        return f"Plex {self.plex.friendlyName}: {self.plex.version}"

    def get_users(self) -> list[MyPlexUser | MyPlexAccount]:
        try:
            users: list[MyPlexUser | MyPlexAccount] = self.plex.myPlexAccount().users()

            # append self to users
            users.append(self.plex.myPlexAccount())

            return users
        except Exception as e:
            logger.error(f"Plex: Failed to get users, Error: {e}")
            raise Exception(e)

    def get_libraries(self) -> dict[str, str]:
        try:
            output = {}

            libraries = self.plex.library.sections()
            logger.debug(
                f"Plex: All Libraries {[library.title for library in libraries]}"
            )

            for library in libraries:
                library_title = library.title
                library_type = library.type

                if library_type not in ["movie", "show"]:
                    logger.debug(
                        f"Plex: Skipping Library {library_title} found type {library_type}",
                    )
                    continue

                output[library_title] = library_type

            return output
        except Exception as e:
            logger.error(f"Plex: Failed to get libraries, Error: {e}")
            raise Exception(e)

    def get_user_library_watched(
        self, user_name: str, user_plex: PlexServer, library: MovieSection | ShowSection
    ) -> LibraryData:
        try:
            logger.info(
                f"Plex: Generating watched for {user_name} in library {library.title}",
            )
            watched = LibraryData(title=library.title)

            library_videos = user_plex.library.section(library.title)

            if library.type == "movie":
                for video in library_videos.search(
                    unwatched=False
                ) + library_videos.search(inProgress=True):
                    if video.isWatched or video.viewOffset >= 60000:
                        watched.movies.append(
                            get_mediaitem(
                                video,
                                video.isWatched,
                                self.generate_guids,
                                self.generate_locations,
                            )
                        )

            elif library.type == "show":
                # Keep track of processed shows to reduce duplicate shows
                processed_shows = []
                for show in library_videos.search(
                    unwatched=False
                ) + library_videos.search(inProgress=True):
                    if show.key in processed_shows:
                        continue
                    processed_shows.append(show.key)
                    show_guids = extract_guids_from_item(show, self.generate_guids)
                    episode_mediaitem = []

                    # Fetch watched or partially watched episodes
                    for episode in show.watched() + show.episodes(
                        viewOffset__gte=60_000
                    ):
                        episode_mediaitem.append(
                            get_mediaitem(
                                episode,
                                episode.isWatched,
                                self.generate_guids,
                                self.generate_locations,
                            )
                        )

                    if episode_mediaitem:
                        watched.series.append(
                            Series(
                                identifiers=MediaIdentifiers(
                                    title=show.title,
                                    locations=(
                                        tuple(
                                            [
                                                filename_from_any_path(location)
                                                for location in show.locations
                                            ]
                                        )
                                        if self.generate_locations
                                        else tuple()
                                    ),
                                    imdb_id=show_guids.get("imdb"),
                                    tvdb_id=show_guids.get("tvdb"),
                                    tmdb_id=show_guids.get("tmdb"),
                                ),
                                episodes=episode_mediaitem,
                            )
                        )

            return watched

        except Exception as e:
            logger.error(
                f"Plex: Failed to get watched for {user_name} in library {library.title}, Error: {e}",
            )
            return LibraryData(title=library.title)

    def get_watched(
        self,
        users: list[MyPlexUser | MyPlexAccount],
        sync_libraries: list[str],
        users_watched: dict[str, UserData] | None = None,
    ) -> dict[str, UserData]:
        try:
            if not users_watched:
                users_watched: dict[str, UserData] = {}

            for user in users:
                if self.admin_user == user:
                    user_plex = self.plex
                else:
                    token = user.get_token(self.plex.machineIdentifier)
                    if token:
                        user_plex = self.login(self.base_url, token, None, None, None)
                    else:
                        logger.error(
                            f"Plex: Failed to get token for {user.title}, skipping",
                        )
                        continue

                user_name: str = (
                    user.username.lower() if user.username else user.title.lower()
                )

                libraries = user_plex.library.sections()

                for library in libraries:
                    if library.title not in sync_libraries:
                        continue

                    if user_name not in users_watched:
                        users_watched[user_name] = UserData()

                    if library.title in users_watched[user_name].libraries:
                        logger.info(
                            f"Plex: {user_name} {library.title} watched history has already been gathered, skipping"
                        )
                        continue

                    library_data = self.get_user_library_watched(
                        user_name, user_plex, library
                    )

                    users_watched[user_name].libraries[library.title] = library_data

            return users_watched
        except Exception as e:
            logger.error(f"Plex: Failed to get users watched, Error: {e}")
            return {}

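    # The returned mapping is keyed by lowercase username, e.g. (hypothetical):
    #   {"jellyplex_watched": UserData(libraries={"Movies": LibraryData(title="Movies", ...)})}
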
    def update_user_watched(
        self,
        user: MyPlexAccount,
        user_plex: PlexServer,
        library_data: LibraryData,
        library_name: str,
        dryrun: bool,
    ) -> None:
        # If there are no movies or shows to update, exit early.
        if not library_data.series and not library_data.movies:
            return

        logger.info(
            f"Plex: Updating watched for {user.title} in library {library_name}"
        )
        library_section = user_plex.library.section(library_name)
        if not library_section:
            logger.error(
                f"Plex: Library {library_name} not found for {user.title}, skipping",
            )
            return

        # Update movies.
        if library_data.movies:
            # Search for Plex movies that are currently marked as unwatched.
            for plex_movie in library_section.search(unwatched=True):
                plex_identifiers = extract_identifiers_from_item(
                    plex_movie, self.generate_guids, self.generate_locations
                )
                # Check each stored movie for a match.
                for stored_movie in library_data.movies:
                    if check_same_identifiers(
                        plex_identifiers, stored_movie.identifiers
                    ):
                        # If the stored movie is marked as watched (or has enough progress),
                        # update the Plex movie accordingly.
                        if stored_movie.status.completed:
                            msg = f"Plex: {plex_movie.title} as watched for {user.title} in {library_name}"
                            if not dryrun:
                                try:
                                    plex_movie.markWatched()
                                except Exception as e:
                                    logger.error(
                                        f"Plex: Failed to mark {plex_movie.title} as watched, Error: {e}"
                                    )
                                    continue

                            logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
                            log_marked(
                                "Plex",
                                user_plex.friendlyName,
                                user.title,
                                library_name,
                                plex_movie.title,
                                None,
                                None,
                                mark_file=get_env_value(
                                    self.env, "MARK_FILE", "mark.log"
                                ),
                            )
                        else:
                            msg = f"Plex: {plex_movie.title} as partially watched for {floor(stored_movie.status.time / 60_000)} minutes for {user.title} in {library_name}"
                            if not dryrun:
                                try:
                                    plex_movie.updateTimeline(stored_movie.status.time)
                                except Exception as e:
                                    logger.error(
                                        f"Plex: Failed to update {plex_movie.title} timeline, Error: {e}"
                                    )
                                    continue

                            logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
                            log_marked(
                                "Plex",
                                user_plex.friendlyName,
                                user.title,
                                library_name,
                                plex_movie.title,
                                duration=stored_movie.status.time,
                                mark_file=get_env_value(
                                    self.env, "MARK_FILE", "mark.log"
                                ),
                            )
                        # Once matched, no need to check further.
                        break

        # Update TV Shows (series/episodes).
        if library_data.series:
            # For each Plex show in the library section:
            plex_shows = library_section.search(unwatched=True)
            for plex_show in plex_shows:
                # Extract identifiers from the Plex show.
                plex_show_identifiers = extract_identifiers_from_item(
                    plex_show, self.generate_guids, self.generate_locations
                )
                # Try to find a matching series in the stored library.
                for stored_series in library_data.series:
                    if check_same_identifiers(
                        plex_show_identifiers, stored_series.identifiers
                    ):
                        logger.trace(f"Found matching show for '{plex_show.title}'")
                        # Now update episodes.
                        # Get the list of Plex episodes for this show.
                        plex_episodes = plex_show.episodes()
                        for plex_episode in plex_episodes:
                            plex_episode_identifiers = extract_identifiers_from_item(
                                plex_episode,
                                self.generate_guids,
                                self.generate_locations,
                            )
                            for stored_ep in stored_series.episodes:
                                if check_same_identifiers(
                                    plex_episode_identifiers, stored_ep.identifiers
                                ):
                                    if stored_ep.status.completed:
                                        msg = f"Plex: {plex_show.title} {plex_episode.title} as watched for {user.title} in {library_name}"
                                        if not dryrun:
                                            try:
                                                plex_episode.markWatched()
                                            except Exception as e:
                                                logger.error(
                                                    f"Plex: Failed to mark {plex_show.title} {plex_episode.title} as watched, Error: {e}"
                                                )
                                                continue

                                        logger.success(
                                            f"{'[DRYRUN] ' if dryrun else ''}{msg}"
                                        )
                                        log_marked(
                                            "Plex",
                                            user_plex.friendlyName,
                                            user.title,
                                            library_name,
                                            plex_show.title,
                                            plex_episode.title,
                                            mark_file=get_env_value(
                                                self.env, "MARK_FILE", "mark.log"
                                            ),
                                        )
                                    else:
                                        msg = f"Plex: {plex_show.title} {plex_episode.title} as partially watched for {floor(stored_ep.status.time / 60_000)} minutes for {user.title} in {library_name}"
                                        if not dryrun:
                                            try:
                                                plex_episode.updateTimeline(
                                                    stored_ep.status.time
                                                )
                                            except Exception as e:
                                                logger.error(
                                                    f"Plex: Failed to update {plex_show.title} {plex_episode.title} timeline, Error: {e}"
                                                )
                                                continue

                                        logger.success(
                                            f"{'[DRYRUN] ' if dryrun else ''}{msg}"
                                        )
                                        log_marked(
                                            "Plex",
                                            user_plex.friendlyName,
                                            user.title,
                                            library_name,
                                            plex_show.title,
                                            plex_episode.title,
                                            stored_ep.status.time,
                                            mark_file=get_env_value(
                                                self.env, "MARK_FILE", "mark.log"
                                            ),
                                        )
                                    break  # Found a matching episode.
                        break  # Found a matching show.

    def update_watched(
        self,
        watched_list: dict[str, UserData],
        user_mapping: dict[str, str] | None = None,
        library_mapping: dict[str, str] | None = None,
        dryrun: bool = False,
    ) -> None:
        for user, user_data in watched_list.items():
            user_other = None
            # Map the username if a mapping is provided
            if user_mapping:
                user_other = search_mapping(user_mapping, user)

            for index, value in enumerate(self.users):
                username_title = (
                    value.username.lower() if value.username else value.title.lower()
                )

                if user.lower() == username_title:
                    user = self.users[index]
                    break
                elif user_other and user_other.lower() == username_title:
                    user = self.users[index]
                    break

            if self.admin_user == user:
                user_plex = self.plex
            else:
                if isinstance(user, str):
                    logger.debug(
                        f"Plex: {user} is not a plex object, attempting to get object for user",
                    )
                    user = self.plex.myPlexAccount().user(user)

                if not isinstance(user, MyPlexUser):
                    logger.error(f"Plex: {user} failed to get PlexUser")
                    continue

                token = user.get_token(self.plex.machineIdentifier)
                if token:
                    user_plex = PlexServer(
                        self.base_url,
                        token,
                        session=self.session,
                    )
                else:
                    logger.error(
                        f"Plex: Failed to get token for {user.title}, skipping",
                    )
                    continue

            if not user_plex:
                logger.error(f"Plex: {user} Failed to get PlexServer")
                continue

            for library_name in user_data.libraries:
                library_data = user_data.libraries[library_name]
                library_other = None
                if library_mapping:
                    library_other = search_mapping(library_mapping, library_name)
                # if library in plex library list
                library_list = user_plex.library.sections()
                if library_name.lower() not in [x.title.lower() for x in library_list]:
                    if library_other:
                        if library_other.lower() in [
                            x.title.lower() for x in library_list
                        ]:
                            logger.info(
                                f"Plex: Library {library_name} not found, but {library_other} found, using {library_other}",
                            )
                            library_name = library_other
                        else:
                            logger.info(
                                f"Plex: Library {library_name} or {library_other} not found in library list",
                            )
                            continue
                    else:
                        logger.info(
                            f"Plex: Library {library_name} not found in library list",
                        )
                        continue

                try:
                    self.update_user_watched(
                        user,
                        user_plex,
                        library_data,
                        library_name,
                        dryrun,
                    )
                except Exception as e:
                    logger.error(
                        f"Plex: Failed to update watched for {user.title} in {library_name}, Error: {e}",
                    )
                    continue

@ -0,0 +1,152 @@
from plexapi.myplex import MyPlexAccount, MyPlexUser
from loguru import logger

from src.emby import Emby
from src.jellyfin import Jellyfin
from src.plex import Plex
from src.functions import search_mapping


def generate_user_list(server: Plex | Jellyfin | Emby) -> list[str]:
    # generate list of users from server 1 and server 2
    server_users: list[str] = []
    if isinstance(server, Plex):
        for user in server.users:
            server_users.append(
                user.username.lower() if user.username else user.title.lower()
            )

    elif isinstance(server, (Jellyfin, Emby)):
        server_users = [key.lower() for key in server.users.keys()]

    return server_users


def combine_user_lists(
    server_1_users: list[str],
    server_2_users: list[str],
    user_mapping: dict[str, str] | None,
) -> dict[str, str]:
    # combined list of overlapping users from plex and jellyfin
    users: dict[str, str] = {}

    for server_1_user in server_1_users:
        if user_mapping:
            mapped_user = search_mapping(user_mapping, server_1_user)
            if mapped_user in server_2_users:
                users[server_1_user] = mapped_user
                continue

        if server_1_user in server_2_users:
            users[server_1_user] = server_1_user

    for server_2_user in server_2_users:
        if user_mapping:
            mapped_user = search_mapping(user_mapping, server_2_user)
            if mapped_user in server_1_users:
                users[mapped_user] = server_2_user
                continue

        if server_2_user in server_1_users:
            users[server_2_user] = server_2_user

    return users

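# e.g. with user_mapping {"JellyUser": "jellyplex_watched"} (and assuming
# search_mapping matches case-insensitively), server 1 user "jellyuser" pairs
# with server 2 user "jellyplex_watched", giving
# {"jellyuser": "jellyplex_watched"}; names here are hypothetical.
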
def filter_user_lists(
    users: dict[str, str], blacklist_users: list[str], whitelist_users: list[str]
) -> dict[str, str]:
    users_filtered: dict[str, str] = {}
    for user in users:
        # whitelist_users is not empty and user lowercase is not in whitelist lowercase
        if len(whitelist_users) > 0:
            if user not in whitelist_users and users[user] not in whitelist_users:
                logger.info(f"{user} or {users[user]} is not in whitelist")
                continue

        if user not in blacklist_users and users[user] not in blacklist_users:
            users_filtered[user] = users[user]

    return users_filtered


def generate_server_users(
    server: Plex | Jellyfin | Emby,
    users: dict[str, str],
) -> list[MyPlexAccount] | dict[str, str] | None:
    if isinstance(server, Plex):
        plex_server_users: list[MyPlexAccount] = []
        for plex_user in server.users:
            username_title = (
                plex_user.username if plex_user.username else plex_user.title
            )

            if (
                username_title.lower() in users.keys()
                or username_title.lower() in users.values()
            ):
                plex_server_users.append(plex_user)

        return plex_server_users
    elif isinstance(server, (Jellyfin, Emby)):
        jelly_emby_server_users: dict[str, str] = {}
        for jellyfin_user, jellyfin_id in server.users.items():
            if (
                jellyfin_user.lower() in users.keys()
                or jellyfin_user.lower() in users.values()
            ):
                jelly_emby_server_users[jellyfin_user] = jellyfin_id

        return jelly_emby_server_users

    return None


def setup_users(
    server_1: Plex | Jellyfin | Emby,
    server_2: Plex | Jellyfin | Emby,
    blacklist_users: list[str],
    whitelist_users: list[str],
    user_mapping: dict[str, str] | None = None,
) -> tuple[
    list[MyPlexAccount | MyPlexUser] | dict[str, str],
    list[MyPlexAccount | MyPlexUser] | dict[str, str],
]:
    server_1_users = generate_user_list(server_1)
    server_2_users = generate_user_list(server_2)
    logger.debug(f"Server 1 users: {server_1_users}")
    logger.debug(f"Server 2 users: {server_2_users}")

    users = combine_user_lists(server_1_users, server_2_users, user_mapping)
    logger.debug(f"User list that exists on both servers {users}")

    users_filtered = filter_user_lists(users, blacklist_users, whitelist_users)
    logger.debug(f"Filtered user list {users_filtered}")

    output_server_1_users = generate_server_users(server_1, users_filtered)
    output_server_2_users = generate_server_users(server_2, users_filtered)

    # Check if users is none or empty
    if output_server_1_users is None or len(output_server_1_users) == 0:
        logger.warning(
            f"No users found for server 1 {type(server_1)}, users: {server_1_users}, overlapping users {users}, filtered users {users_filtered}, server 1 users {server_1.users}"
        )

    if output_server_2_users is None or len(output_server_2_users) == 0:
        logger.warning(
            f"No users found for server 2 {type(server_2)}, users: {server_2_users}, overlapping users {users}, filtered users {users_filtered}, server 2 users {server_2.users}"
        )

    if (
        output_server_1_users is None
        or len(output_server_1_users) == 0
        or output_server_2_users is None
        or len(output_server_2_users) == 0
    ):
        raise Exception("No users found for one or both servers")

    logger.info(f"Server 1 users: {output_server_1_users}")
    logger.info(f"Server 2 users: {output_server_2_users}")

    return output_server_1_users, output_server_2_users

@ -0,0 +1,323 @@
import copy
from datetime import datetime
from pydantic import BaseModel, Field
from loguru import logger
from typing import Any

from src.functions import search_mapping


class MediaIdentifiers(BaseModel):
    title: str | None = None

    # File information, will be folder for series and media file for episode/movie
    locations: tuple[str, ...] = tuple()

    # Guids
    imdb_id: str | None = None
    tvdb_id: str | None = None
    tmdb_id: str | None = None


class WatchedStatus(BaseModel):
    completed: bool
    time: int
    viewed_date: datetime


class MediaItem(BaseModel):
    identifiers: MediaIdentifiers
    status: WatchedStatus


class Series(BaseModel):
    identifiers: MediaIdentifiers
    episodes: list[MediaItem] = Field(default_factory=list)


class LibraryData(BaseModel):
    title: str
    movies: list[MediaItem] = Field(default_factory=list)
    series: list[Series] = Field(default_factory=list)


class UserData(BaseModel):
    libraries: dict[str, LibraryData] = Field(default_factory=dict)


def merge_mediaitem_data(ep1: MediaItem, ep2: MediaItem) -> MediaItem:
    """
    Merge two MediaItem episodes by comparing their watched status.
    If one is completed while the other isn't, choose the completed one.
    If both are completed or both are not, choose the one with the higher time.
    """
    if ep1.status.completed != ep2.status.completed:
        return ep1 if ep1.status.completed else ep2
    return ep1 if ep1.status.time >= ep2.status.time else ep2

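# e.g. a completed view beats a 10-minute partial view of the same episode
# (ids/now are placeholders):
#   a = MediaItem(identifiers=ids, status=WatchedStatus(completed=False, time=600_000, viewed_date=now))
#   b = MediaItem(identifiers=ids, status=WatchedStatus(completed=True, time=0, viewed_date=now))
#   merge_mediaitem_data(a, b)  # -> b
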
def merge_series_data(series1: Series, series2: Series) -> Series:
    """
    Merge two Series objects by combining their episodes.
    For duplicate episodes (determined by check_same_identifiers), merge their watched status.
    """
    merged_series = copy.deepcopy(series1)
    for ep in series2.episodes:
        for idx, merged_ep in enumerate(merged_series.episodes):
            if check_same_identifiers(ep.identifiers, merged_ep.identifiers):
                merged_series.episodes[idx] = merge_mediaitem_data(merged_ep, ep)
                break
        else:
            merged_series.episodes.append(copy.deepcopy(ep))
    return merged_series


def merge_library_data(lib1: LibraryData, lib2: LibraryData) -> LibraryData:
    """
    Merge two LibraryData objects by extending movies and merging series.
    For series, duplicates are determined using check_same_identifiers.
    """
    merged = copy.deepcopy(lib1)

    # Merge movies.
    for movie in lib2.movies:
        for idx, merged_movie in enumerate(merged.movies):
            if check_same_identifiers(movie.identifiers, merged_movie.identifiers):
                merged.movies[idx] = merge_mediaitem_data(merged_movie, movie)
                break
        else:
            merged.movies.append(copy.deepcopy(movie))

    # Merge series.
    for series2 in lib2.series:
        for idx, series1 in enumerate(merged.series):
            if check_same_identifiers(series1.identifiers, series2.identifiers):
                merged.series[idx] = merge_series_data(series1, series2)
                break
        else:
            merged.series.append(copy.deepcopy(series2))

    return merged


def merge_user_data(user1: UserData, user2: UserData) -> UserData:
    """
    Merge two UserData objects by merging their libraries.
    If a library exists in both, merge its content; otherwise, add the new library.
    """
    merged_libraries = copy.deepcopy(user1.libraries)
    for lib_key, lib_data in user2.libraries.items():
        if lib_key in merged_libraries:
            merged_libraries[lib_key] = merge_library_data(
                merged_libraries[lib_key], lib_data
            )
        else:
            merged_libraries[lib_key] = copy.deepcopy(lib_data)
    return UserData(libraries=merged_libraries)


def merge_server_watched(
    watched_list_1: dict[str, UserData],
    watched_list_2: dict[str, UserData],
    user_mapping: dict[str, str] | None = None,
    library_mapping: dict[str, str] | None = None,
) -> dict[str, UserData]:
    """
    Merge two dictionaries of UserData while taking into account possible
    differences in user and library keys via the provided mappings.
    """
    merged_watched = copy.deepcopy(watched_list_1)

    for user_2, user_data in watched_list_2.items():
        # Determine matching user key.
        user_key = user_mapping.get(user_2, user_2) if user_mapping else user_2
        if user_key not in merged_watched:
            merged_watched[user_key] = copy.deepcopy(user_data)
            continue

        for lib_key, lib_data in user_data.libraries.items():
            mapped_lib_key = (
                library_mapping.get(lib_key, lib_key) if library_mapping else lib_key
            )
            if mapped_lib_key not in merged_watched[user_key].libraries:
                merged_watched[user_key].libraries[mapped_lib_key] = copy.deepcopy(
                    lib_data
                )
            else:
                merged_watched[user_key].libraries[mapped_lib_key] = merge_library_data(
                    merged_watched[user_key].libraries[mapped_lib_key],
                    lib_data,
                )

    return merged_watched


def check_same_identifiers(item1: MediaIdentifiers, item2: MediaIdentifiers) -> bool:
    # Check for duplicate based on file locations:
    if item1.locations and item2.locations:
        if set(item1.locations) & set(item2.locations):
            return True

    # Check for duplicate based on GUIDs:
    if (
        (item1.imdb_id and item2.imdb_id and item1.imdb_id == item2.imdb_id)
        or (item1.tvdb_id and item2.tvdb_id and item1.tvdb_id == item2.tvdb_id)
        or (item1.tmdb_id and item2.tmdb_id and item1.tmdb_id == item2.tmdb_id)
    ):
        return True

    return False

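# e.g. two entries for the same file match on location alone, even when no
# guids were generated (titles/filenames are hypothetical):
#   x = MediaIdentifiers(title="Example", locations=("Example.mkv",))
#   y = MediaIdentifiers(title="Example (1080p)", locations=("Example.mkv",))
#   check_same_identifiers(x, y)  # -> True, shared location
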
def check_remove_entry(item1: MediaItem, item2: MediaItem) -> bool:
    """
    Returns True if item1 (from watched_list_1) should be removed
    in favor of item2 (from watched_list_2), based on:
      - Duplicate criteria:
          * They match if any file location is shared OR
            at least one of imdb_id, tvdb_id, or tmdb_id matches.
      - Watched status:
          * If one is complete and the other is not, remove the incomplete one.
          * If both are incomplete, remove the one with lower progress (time).
          * If both are complete, remove item1 as a duplicate.
    """
    if not check_same_identifiers(item1.identifiers, item2.identifiers):
        return False

    # Compare watched statuses.
    status1 = item1.status
    status2 = item2.status

    # If one is complete and the other isn't, remove the one that's not complete.
    if status1.completed != status2.completed:
        if not status1.completed and status2.completed:
            return True  # Remove item1 since it's not complete.
        else:
            return False  # Do not remove item1; it's complete.

    # Both have the same completed status.
    if not status1.completed and not status2.completed:
        # Both incomplete: remove the one with lower progress (time).
        if status1.time < status2.time:
            return True  # Remove item1 because it has watched less.
        elif status1.time > status2.time:
            return False  # Keep item1 because it has more progress.
        else:
            # Same progress; remove the duplicate.
            return True

    # If both are complete, consider item1 the duplicate and remove it.
    return True

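# e.g. for the same movie, a partial view on list 1 is dropped in favour of a
# completed view on list 2 (variable names are placeholders):
#   check_remove_entry(partial_item, completed_item)  # -> True, drop partial_item
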
def cleanup_watched(
    watched_list_1: dict[str, UserData],
    watched_list_2: dict[str, UserData],
    user_mapping: dict[str, str] | None = None,
    library_mapping: dict[str, str] | None = None,
) -> dict[str, UserData]:
    modified_watched_list_1 = copy.deepcopy(watched_list_1)

    # remove entries from watched_list_1 that are in watched_list_2
    for user_1 in watched_list_1:
        user_other = None
        if user_mapping:
            user_other = search_mapping(user_mapping, user_1)
        user_2 = get_other(watched_list_2, user_1, user_other)
        if user_2 is None:
            continue

        for library_1_key in watched_list_1[user_1].libraries:
            library_other = None
            if library_mapping:
                library_other = search_mapping(library_mapping, library_1_key)
            library_2_key = get_other(
                watched_list_2[user_2].libraries, library_1_key, library_other
            )
            if library_2_key is None:
                continue

            library_1 = watched_list_1[user_1].libraries[library_1_key]
            library_2 = watched_list_2[user_2].libraries[library_2_key]

            filtered_movies = []
            for movie in library_1.movies:
                remove_flag = False
                for movie2 in library_2.movies:
                    if check_remove_entry(movie, movie2):
                        logger.trace(f"Removing movie: {movie.identifiers.title}")
                        remove_flag = True
                        break

                if not remove_flag:
                    filtered_movies.append(movie)

            modified_watched_list_1[user_1].libraries[
                library_1_key
            ].movies = filtered_movies

            # TV Shows
            filtered_series_list = []
            for series1 in library_1.series:
                matching_series = None
                for series2 in library_2.series:
                    if check_same_identifiers(series1.identifiers, series2.identifiers):
                        matching_series = series2
                        break

                if matching_series is None:
                    # No matching show in watched_list_2; keep the series as is.
                    filtered_series_list.append(series1)
                else:
                    # We have a matching show; now clean up the episodes.
                    filtered_episodes = []
                    for ep1 in series1.episodes:
                        remove_flag = False
                        for ep2 in matching_series.episodes:
                            if check_remove_entry(ep1, ep2):
                                logger.trace(
                                    f"Removing episode '{ep1.identifiers.title}' from show '{series1.identifiers.title}'",
                                )
                                remove_flag = True
                                break
                        if not remove_flag:
                            filtered_episodes.append(ep1)

                    # Only keep the series if there are remaining episodes.
                    if filtered_episodes:
                        modified_series1 = copy.deepcopy(series1)
                        modified_series1.episodes = filtered_episodes
                        filtered_series_list.append(modified_series1)
                    else:
                        logger.trace(
                            f"Removing entire show '{series1.identifiers.title}' as no episodes remain after cleanup.",
                        )

            modified_watched_list_1[user_1].libraries[
                library_1_key
            ].series = filtered_series_list

    # After processing, remove any library that is completely empty.
    for user, user_data in modified_watched_list_1.items():
        new_libraries = {}
        for lib_key, library in user_data.libraries.items():
            if library.movies or library.series:
                new_libraries[lib_key] = library
            else:
                logger.trace(f"Removing empty library '{lib_key}' for user '{user}'")
        user_data.libraries = new_libraries

    return modified_watched_list_1


def get_other(
    watched_list: dict[str, Any], object_1: str, object_2: str | None
) -> str | None:
    if object_1 in watched_list:
        return object_1

    if object_2 and object_2 in watched_list:
        return object_2

    logger.info(
        f"{object_1}{' and ' + object_2 if object_2 else ''} not found in watched list 2"
    )

    return None

@ -0,0 +1,113 @@
# Global Settings

## Do not mark any shows/movies as played and instead just output to log if they would have been marked.
DRYRUN = "True"

## Debugging level, "info" is default, "debug" is more verbose
DEBUG_LEVEL = "trace"

## If set to true then the script will only run once and then exit
RUN_ONLY_ONCE = "True"

## How often to run the script in seconds
SLEEP_DURATION = 10

## Log file where all output will be written to
LOG_FILE = "log.log"

## Mark file where all shows/movies that have been marked as played will be written to
MARK_FILE = "mark.log"

## Timeout for requests for jellyfin
REQUEST_TIMEOUT = 300

## Max threads for processing
MAX_THREADS = 2

## Generate guids
## Generating guids is a slow process, so this is a way to speed up the process
## by using the location only, useful when using the same files on multiple servers
GENERATE_GUIDS = "True"

## Generate locations
## Generating locations is a slow process, so this is a way to speed up the process
## by using the guid only, useful when using different files on multiple servers
GENERATE_LOCATIONS = "True"

## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
USER_MAPPING = {"JellyUser":"jellyplex_watched"}

## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options
LIBRARY_MAPPING = { "Shows": "TV Shows" }


## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply, so if a mapping for the user or library exists then both will be excluded.
## Comma separated for multiple options
#BLACKLIST_LIBRARY = ""
#WHITELIST_LIBRARY = "Movies"
#BLACKLIST_LIBRARY_TYPE = "Series"
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
#BLACKLIST_USERS = ""
WHITELIST_USERS = "jellyplex_watched"


# Plex

## Recommended to use a token, as connecting directly to the server is faster than going through the plex servers
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
PLEX_BASEURL = "http://localhost:32400"
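## e.g. two plex servers at once (hypothetical hosts):
#PLEX_BASEURL = "http://plex1:32400, http://plex2:32400"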

## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
## Comma separated list for multiple servers
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"

## If not using a plex token then use the username and password of the server admin along with the servername
## Comma separated for multiple options
#PLEX_USERNAME = "PlexUser, PlexUser2"
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"

## Skip hostname validation for ssl certificates.
## Set to True if running into ssl certificate errors
SSL_BYPASS = "True"

# Jellyfin

## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
JELLYFIN_BASEURL = "http://localhost:8096"

## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
## Comma separated list for multiple servers
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"

# Emby

## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
EMBY_BASEURL = "http://localhost:8097"

## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
## Comma separated list for multiple servers
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"


# Syncing Options

## Control the direction of syncing, e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
SYNC_FROM_PLEX_TO_JELLYFIN = "False"
SYNC_FROM_PLEX_TO_PLEX = "False"
SYNC_FROM_PLEX_TO_EMBY = "False"

SYNC_FROM_JELLYFIN_TO_PLEX = "False"
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "False"
SYNC_FROM_JELLYFIN_TO_EMBY = "False"

SYNC_FROM_EMBY_TO_PLEX = "True"
SYNC_FROM_EMBY_TO_JELLYFIN = "True"
SYNC_FROM_EMBY_TO_EMBY = "True"

@ -0,0 +1,113 @@
# Global Settings

## Do not mark any shows/movies as played and instead just output to log if they would have been marked.
DRYRUN = "True"

## Debugging level, "info" is default, "debug" is more verbose
DEBUG_LEVEL = "trace"

## If set to true then the script will only run once and then exit
RUN_ONLY_ONCE = "True"

## How often to run the script in seconds
SLEEP_DURATION = 10

## Log file where all output will be written to
LOG_FILE = "log.log"

## Mark file where all shows/movies that have been marked as played will be written to
MARK_FILE = "mark.log"

## Timeout for requests for jellyfin
REQUEST_TIMEOUT = 300

## Max threads for processing
MAX_THREADS = 2

## Generate guids
## Generating guids is a slow process, so this is a way to speed up the process
## by using the location only, useful when using the same files on multiple servers
GENERATE_GUIDS = "True"

## Generate locations
## Generating locations is a slow process, so this is a way to speed up the process
## by using the guid only, useful when using different files on multiple servers
GENERATE_LOCATIONS = "False"

## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
USER_MAPPING = {"JellyUser":"jellyplex_watched"}

## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options
LIBRARY_MAPPING = { "Shows": "TV Shows" }


## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply, so if a mapping for the user or library exists then both will be excluded.
## Comma separated for multiple options
#BLACKLIST_LIBRARY = ""
#WHITELIST_LIBRARY = "Movies"
#BLACKLIST_LIBRARY_TYPE = "Series"
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
#BLACKLIST_USERS = ""
WHITELIST_USERS = "jellyplex_watched"


# Plex

## Recommended to use a token, as connecting directly to the server is faster than going through the plex servers
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
PLEX_BASEURL = "http://localhost:32400"

## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
## Comma separated list for multiple servers
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"

## If not using a plex token then use the username and password of the server admin along with the servername
## Comma separated for multiple options
#PLEX_USERNAME = "PlexUser, PlexUser2"
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"

## Skip hostname validation for ssl certificates.
## Set to True if running into ssl certificate errors
SSL_BYPASS = "True"

# Jellyfin

## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
JELLYFIN_BASEURL = "http://localhost:8096"

## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
## Comma separated list for multiple servers
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"

# Emby

## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
EMBY_BASEURL = "http://localhost:8097"

## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
## Comma separated list for multiple servers
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"


# Syncing Options

## Control the direction of syncing, e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
SYNC_FROM_PLEX_TO_PLEX = "True"
SYNC_FROM_PLEX_TO_EMBY = "True"

SYNC_FROM_JELLYFIN_TO_PLEX = "True"
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
SYNC_FROM_JELLYFIN_TO_EMBY = "True"

SYNC_FROM_EMBY_TO_PLEX = "True"
SYNC_FROM_EMBY_TO_JELLYFIN = "True"
SYNC_FROM_EMBY_TO_EMBY = "True"

@ -0,0 +1,113 @@
# Global Settings

## Do not mark any shows/movies as played and instead just output to log if they would have been marked.
DRYRUN = "True"

## Debugging level, "info" is default, "debug" is more verbose
DEBUG_LEVEL = "trace"

## If set to true then the script will only run once and then exit
RUN_ONLY_ONCE = "True"

## How often to run the script in seconds
SLEEP_DURATION = 10

## Log file where all output will be written to
LOG_FILE = "log.log"

## Mark file where all shows/movies that have been marked as played will be written to
MARK_FILE = "mark.log"

## Timeout for requests for jellyfin
REQUEST_TIMEOUT = 300

## Max threads for processing
MAX_THREADS = 2

## Generate guids
## Generating guids is a slow process, so this is a way to speed up the process
## by using the location only, useful when using the same files on multiple servers
GENERATE_GUIDS = "True"

## Generate locations
## Generating locations is a slow process, so this is a way to speed up the process
## by using the guid only, useful when using different files on multiple servers
GENERATE_LOCATIONS = "True"

## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
USER_MAPPING = {"JellyUser":"jellyplex_watched"}

## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options
LIBRARY_MAPPING = { "Shows": "TV Shows" }


## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply, so if a mapping for the user or library exists then both will be excluded.
## Comma separated for multiple options
#BLACKLIST_LIBRARY = ""
#WHITELIST_LIBRARY = "Movies"
#BLACKLIST_LIBRARY_TYPE = "Series"
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
#BLACKLIST_USERS = ""
WHITELIST_USERS = "jellyplex_watched"


# Plex

## Recommended to use a token, as connecting directly to the server is faster than going through the plex servers
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
PLEX_BASEURL = "http://localhost:32400"

## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
## Comma separated list for multiple servers
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"

## If not using a plex token then use the username and password of the server admin along with the servername
## Comma separated for multiple options
#PLEX_USERNAME = "PlexUser, PlexUser2"
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"

## Skip hostname validation for ssl certificates.
## Set to True if running into ssl certificate errors
SSL_BYPASS = "True"

# Jellyfin

## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
JELLYFIN_BASEURL = "http://localhost:8096"

## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
## Comma separated list for multiple servers
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"

# Emby

## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
EMBY_BASEURL = "http://localhost:8097"

## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
## Comma separated list for multiple servers
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"


# Syncing Options

## Control the direction of syncing, e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
SYNC_FROM_PLEX_TO_JELLYFIN = "False"
SYNC_FROM_PLEX_TO_PLEX = "False"
SYNC_FROM_PLEX_TO_EMBY = "False"

SYNC_FROM_JELLYFIN_TO_PLEX = "True"
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
SYNC_FROM_JELLYFIN_TO_EMBY = "True"

SYNC_FROM_EMBY_TO_PLEX = "False"
SYNC_FROM_EMBY_TO_JELLYFIN = "False"
SYNC_FROM_EMBY_TO_EMBY = "False"

@ -0,0 +1,113 @@
# Global Settings

## Do not mark any shows/movies as played and instead just output to log if they would have been marked.
DRYRUN = "True"

## Debugging level, "info" is default, "debug" is more verbose
DEBUG_LEVEL = "trace"

## If set to true then the script will only run once and then exit
RUN_ONLY_ONCE = "True"

## How often to run the script in seconds
SLEEP_DURATION = 10

## Log file where all output will be written to
LOG_FILE = "log.log"

## Mark file where all shows/movies that have been marked as played will be written to
MARK_FILE = "mark.log"

## Timeout for requests for jellyfin
REQUEST_TIMEOUT = 300

## Max threads for processing
MAX_THREADS = 2

## Generate guids
## Generating guids is a slow process, so this is a way to speed up the process
## by using the location only, useful when using the same files on multiple servers
GENERATE_GUIDS = "False"

## Generate locations
## Generating locations is a slow process, so this is a way to speed up the process
## by using the guid only, useful when using different files on multiple servers
GENERATE_LOCATIONS = "True"

## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
USER_MAPPING = {"JellyUser":"jellyplex_watched"}

## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options
LIBRARY_MAPPING = { "Shows": "TV Shows" }


## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply, so if a mapping for the user or library exists then both will be excluded.
## Comma separated for multiple options
#BLACKLIST_LIBRARY = ""
#WHITELIST_LIBRARY = "Movies"
#BLACKLIST_LIBRARY_TYPE = "Series"
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
#BLACKLIST_USERS = ""
WHITELIST_USERS = "jellyplex_watched"


# Plex

## Recommended to use a token, as connecting directly to the server is faster than going through the plex servers
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
PLEX_BASEURL = "http://localhost:32400"

## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
## Comma separated list for multiple servers
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"

## If not using a plex token then use the username and password of the server admin along with the servername
## Comma separated for multiple options
#PLEX_USERNAME = "PlexUser, PlexUser2"
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"

## Skip hostname validation for ssl certificates.
## Set to True if running into ssl certificate errors
SSL_BYPASS = "True"

# Jellyfin

## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
JELLYFIN_BASEURL = "http://localhost:8096"

## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
## Comma separated list for multiple servers
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"

# Emby

## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
EMBY_BASEURL = "http://localhost:8097"

## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
## Comma separated list for multiple servers
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"


# Syncing Options

## Control the direction of syncing, e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
SYNC_FROM_PLEX_TO_PLEX = "True"
SYNC_FROM_PLEX_TO_EMBY = "True"

SYNC_FROM_JELLYFIN_TO_PLEX = "True"
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
SYNC_FROM_JELLYFIN_TO_EMBY = "True"

SYNC_FROM_EMBY_TO_PLEX = "True"
SYNC_FROM_EMBY_TO_JELLYFIN = "True"
SYNC_FROM_EMBY_TO_EMBY = "True"

@ -0,0 +1,113 @@
# Global Settings

## Do not mark any shows/movies as played and instead just output to log if they would have been marked.
DRYRUN = "True"

## Debugging level, "info" is default, "debug" is more verbose
DEBUG_LEVEL = "trace"

## If set to true then the script will only run once and then exit
RUN_ONLY_ONCE = "True"

## How often to run the script in seconds
SLEEP_DURATION = 10

## Log file where all output will be written to
LOG_FILE = "log.log"

## Mark file where all shows/movies that have been marked as played will be written to
MARK_FILE = "mark.log"

## Timeout for requests for jellyfin
REQUEST_TIMEOUT = 300

## Max threads for processing
MAX_THREADS = 2

## Generate guids
## Generating guids is a slow process, so this is a way to speed up the process
## by using the location only, useful when using the same files on multiple servers
GENERATE_GUIDS = "True"

## Generate locations
## Generating locations is a slow process, so this is a way to speed up the process
## by using the guid only, useful when using different files on multiple servers
GENERATE_LOCATIONS = "True"

## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
USER_MAPPING = {"JellyUser":"jellyplex_watched"}

## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options
LIBRARY_MAPPING = { "Shows": "TV Shows" }


## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply, so if a mapping for the user or library exists then both will be excluded.
## Comma separated for multiple options
#BLACKLIST_LIBRARY = ""
#WHITELIST_LIBRARY = "Movies"
#BLACKLIST_LIBRARY_TYPE = "Series"
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
#BLACKLIST_USERS = ""
WHITELIST_USERS = "jellyplex_watched"


# Plex

## Recommended to use a token, as connecting directly to the server is faster than going through the plex servers
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
PLEX_BASEURL = "http://localhost:32400"

## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
## Comma separated list for multiple servers
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"

## If not using a plex token then use the username and password of the server admin along with the servername
## Comma separated for multiple options
#PLEX_USERNAME = "PlexUser, PlexUser2"
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"

## Skip hostname validation for ssl certificates.
## Set to True if running into ssl certificate errors
SSL_BYPASS = "True"

# Jellyfin

## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
JELLYFIN_BASEURL = "http://localhost:8096"

## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
## Comma separated list for multiple servers
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"

# Emby

## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
EMBY_BASEURL = "http://localhost:8097"

## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
## Comma separated list for multiple servers
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"


# Syncing Options

## Control the direction of syncing, e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
SYNC_FROM_PLEX_TO_PLEX = "True"
SYNC_FROM_PLEX_TO_EMBY = "True"

SYNC_FROM_JELLYFIN_TO_PLEX = "False"
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "False"
SYNC_FROM_JELLYFIN_TO_EMBY = "False"

SYNC_FROM_EMBY_TO_PLEX = "False"
SYNC_FROM_EMBY_TO_JELLYFIN = "False"
SYNC_FROM_EMBY_TO_EMBY = "False"

@ -0,0 +1,113 @@
# Global Settings

## Do not mark any shows/movies as played; instead just output to the log what would have been marked.
DRYRUN = "False"

## Debugging level, "info" is default, "debug" is more verbose
DEBUG_LEVEL = "trace"

## If set to true then the script will only run once and then exit
RUN_ONLY_ONCE = "True"

## How often to run the script in seconds
SLEEP_DURATION = 10

## Log file where all output will be written to
LOG_FILE = "log.log"

## Mark file where all shows/movies that have been marked as played will be written to
MARK_FILE = "mark.log"

## Timeout for requests for Jellyfin
REQUEST_TIMEOUT = 300

## Max threads for processing
MAX_THREADS = 2

## Generate guids
## Generating guids is a slow process, so this is a way to speed up the process
## by using the location only, useful when using the same files on multiple servers
GENERATE_GUIDS = "True"

## Generate locations
## Generating locations is a slow process, so this is a way to speed up the process
## by using the guid only, useful when using different files on multiple servers
GENERATE_LOCATIONS = "True"

## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
USER_MAPPING = {"JellyUser":"jellyplex_watched"}

## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options
LIBRARY_MAPPING = { "Shows": "TV Shows" }


## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users.
## Mappings apply, so if a mapping for the user or library exists then both will be excluded.
## Comma separated for multiple options
#BLACKLIST_LIBRARY = ""
#WHITELIST_LIBRARY = "Movies"
#BLACKLIST_LIBRARY_TYPE = "Series"
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
#BLACKLIST_USERS = ""
WHITELIST_USERS = "jellyplex_watched"


# Plex

## A token is recommended, as it connects directly to the server instead of going through the Plex servers and is therefore faster
## URL of the Plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
PLEX_BASEURL = "http://localhost:32400"

## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
## Comma separated list for multiple servers
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"

## If not using a Plex token, use the username and password of the server admin along with the server name
## Comma separated for multiple options
#PLEX_USERNAME = "PlexUser, PlexUser2"
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"

## Skip hostname validation for SSL certificates.
## Set to True if running into SSL certificate errors
SSL_BYPASS = "True"

# Jellyfin

## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
JELLYFIN_BASEURL = "http://localhost:8096"

## Jellyfin API token, created manually by logging in to the Jellyfin server admin dashboard and creating an API key
## Comma separated list for multiple servers
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"

# Emby

## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
EMBY_BASEURL = "http://localhost:8097"

## Emby API token, created manually by logging in to the Emby server admin dashboard and creating an API key
## Comma separated list for multiple servers
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"


# Syncing Options

## Control the direction of syncing. For example, SYNC_FROM_PLEX_TO_JELLYFIN set to true causes updates from Plex
## to be applied to Jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true syncs updates between multiple Plex servers.
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
SYNC_FROM_PLEX_TO_PLEX = "True"
SYNC_FROM_PLEX_TO_EMBY = "True"

SYNC_FROM_JELLYFIN_TO_PLEX = "True"
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
SYNC_FROM_JELLYFIN_TO_EMBY = "True"

SYNC_FROM_EMBY_TO_PLEX = "True"
SYNC_FROM_EMBY_TO_JELLYFIN = "True"
SYNC_FROM_EMBY_TO_EMBY = "True"
@ -1 +1 @@
-pytest
+pytest==7.3.0
@ -0,0 +1,87 @@
import sys
import os

# Get the directory where this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Get the parent directory of the current directory.
parent = os.path.dirname(current)

# Add the parent directory to sys.path so that src/ is importable.
sys.path.append(parent)

from src.black_white import setup_black_white_lists


def test_setup_black_white_lists():
    # Simple
    blacklist_library = ["library1", "library2"]
    whitelist_library = ["library1", "library2"]
    blacklist_library_type = ["library_type1", "library_type2"]
    whitelist_library_type = ["library_type1", "library_type2"]
    blacklist_users = ["user1", "user2"]
    whitelist_users = ["user1", "user2"]

    (
        results_blacklist_library,
        return_whitelist_library,
        return_blacklist_library_type,
        return_whitelist_library_type,
        return_blacklist_users,
        return_whitelist_users,
    ) = setup_black_white_lists(
        blacklist_library,
        whitelist_library,
        blacklist_library_type,
        whitelist_library_type,
        blacklist_users,
        whitelist_users,
    )

    assert results_blacklist_library == ["library1", "library2"]
    assert return_whitelist_library == ["library1", "library2"]
    assert return_blacklist_library_type == ["library_type1", "library_type2"]
    assert return_whitelist_library_type == ["library_type1", "library_type2"]
    assert return_blacklist_users == ["user1", "user2"]
    assert return_whitelist_users == ["user1", "user2"]


def test_library_mapping_black_white_list():
    blacklist_library = ["library1", "library2"]
    whitelist_library = ["library1", "library2"]
    blacklist_library_type = ["library_type1", "library_type2"]
    whitelist_library_type = ["library_type1", "library_type2"]
    blacklist_users = ["user1", "user2"]
    whitelist_users = ["user1", "user2"]

    # Library mapping and user mapping
    library_mapping = {"library1": "library3"}
    user_mapping = {"user1": "user3"}

    (
        results_blacklist_library,
        return_whitelist_library,
        return_blacklist_library_type,
        return_whitelist_library_type,
        return_blacklist_users,
        return_whitelist_users,
    ) = setup_black_white_lists(
        blacklist_library,
        whitelist_library,
        blacklist_library_type,
        whitelist_library_type,
        blacklist_users,
        whitelist_users,
        library_mapping,
        user_mapping,
    )

    assert results_blacklist_library == ["library1", "library2", "library3"]
    assert return_whitelist_library == ["library1", "library2", "library3"]
    assert return_blacklist_library_type == ["library_type1", "library_type2"]
    assert return_whitelist_library_type == ["library_type1", "library_type2"]
    assert return_blacklist_users == ["user1", "user2", "user3"]
    assert return_whitelist_users == ["user1", "user2", "user3"]
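The assertions above pin down the mapping behaviour of setup_black_white_lists: when a user or library mapping pairs a listed name with another name, the counterpart gets appended to the same list. A minimal sketch of that extension step, assuming the real implementation in src.black_white differs in structure and edge cases:

# Sketch only: extend a black/white list with the mapped counterpart of any
# entry that appears in a mapping, in either direction.
def extend_with_mapping(items: list[str], mapping: dict[str, str] | None) -> list[str]:
    if not mapping:
        return list(items)
    out = list(items)
    for key, value in mapping.items():
        if key in items and value not in out:
            out.append(value)
        if value in items and key not in out:
            out.append(key)
    return out

# extend_with_mapping(["library1", "library2"], {"library1": "library3"})
# -> ["library1", "library2", "library3"]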
@ -0,0 +1,278 @@
import sys
import os

# Get the directory where this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Get the parent directory of the current directory.
parent = os.path.dirname(current)

# Add the parent directory to sys.path so that src/ is importable.
sys.path.append(parent)

from src.functions import (
    search_mapping,
)

from src.library import (
    check_skip_logic,
    check_blacklist_logic,
    check_whitelist_logic,
)

blacklist_library = ["TV Shows"]
whitelist_library = ["Movies"]
blacklist_library_type = ["episodes"]
whitelist_library_type = ["movies"]
library_mapping = {"Shows": "TV Shows", "Movie": "Movies"}

show_list = {
    frozenset(
        {
            ("locations", ("The Last of Us",)),
            ("tmdb", "100088"),
            ("imdb", "tt3581920"),
            ("tvdb", "392256"),
            ("title", "The Last of Us"),
        }
    ): [
        {
            "imdb": "tt11957006",
            "tmdb": "2181581",
            "tvdb": "8444132",
            "locations": (
                (
                    "The Last of Us - S01E01 - When You're Lost in the Darkness WEBDL-1080p.mkv",
                )
            ),
            "status": {"completed": True, "time": 0},
        }
    ]
}
movie_list = [
    {
        "title": "Coco",
        "imdb": "tt2380307",
        "tmdb": "354912",
        "locations": [("Coco (2017) Remux-2160p.mkv", "Coco (2017) Remux-1080p.mkv")],
        "status": {"completed": True, "time": 0},
    }
]

show_titles = {
    "imdb": ["tt3581920"],
    "locations": [("The Last of Us",)],
    "tmdb": ["100088"],
    "tvdb": ["392256"],
}
episode_titles = {
    "imdb": ["tt11957006"],
    "locations": [
        ("The Last of Us - S01E01 - When You're Lost in the Darkness WEBDL-1080p.mkv",)
    ],
    "tmdb": ["2181581"],
    "tvdb": ["8444132"],
    "completed": [True],
    "time": [0],
    "show": [
        {
            "imdb": "tt3581920",
            "locations": ("The Last of Us",),
            "title": "The Last of Us",
            "tmdb": "100088",
            "tvdb": "392256",
        }
    ],
}
movie_titles = {
    "imdb": ["tt2380307"],
    "locations": [
        [
            (
                "Coco (2017) Remux-2160p.mkv",
                "Coco (2017) Remux-1080p.mkv",
            )
        ]
    ],
    "title": ["coco"],
    "tmdb": ["354912"],
    "completed": [True],
    "time": [0],
}


def test_check_skip_logic():
    # Fails
    library_title = "Test"
    library_type = "movies"
    skip_reason = check_skip_logic(
        library_title,
        library_type,
        blacklist_library,
        whitelist_library,
        blacklist_library_type,
        whitelist_library_type,
        library_mapping,
    )

    assert skip_reason == "Test is not in whitelist_library"

    library_title = "Shows"
    library_type = "episodes"
    skip_reason = check_skip_logic(
        library_title,
        library_type,
        blacklist_library,
        whitelist_library,
        blacklist_library_type,
        whitelist_library_type,
        library_mapping,
    )

    assert (
        skip_reason
        == "episodes is in blacklist_library_type and TV Shows is in blacklist_library and "
        + "episodes is not in whitelist_library_type and Shows is not in whitelist_library"
    )

    # Passes
    library_title = "Movie"
    library_type = "movies"
    skip_reason = check_skip_logic(
        library_title,
        library_type,
        blacklist_library,
        whitelist_library,
        blacklist_library_type,
        whitelist_library_type,
        library_mapping,
    )

    assert skip_reason is None


def test_check_blacklist_logic():
    # Fails
    library_title = "Shows"
    library_type = "episodes"
    library_other = search_mapping(library_mapping, library_title)
    skip_reason = check_blacklist_logic(
        library_title,
        library_type,
        blacklist_library,
        blacklist_library_type,
        library_other,
    )

    assert (
        skip_reason
        == "episodes is in blacklist_library_type and TV Shows is in blacklist_library"
    )

    library_title = "TV Shows"
    library_type = "episodes"
    library_other = search_mapping(library_mapping, library_title)
    skip_reason = check_blacklist_logic(
        library_title,
        library_type,
        blacklist_library,
        blacklist_library_type,
        library_other,
    )

    assert (
        skip_reason
        == "episodes is in blacklist_library_type and TV Shows is in blacklist_library"
    )

    # Passes
    library_title = "Movie"
    library_type = "movies"
    library_other = search_mapping(library_mapping, library_title)
    skip_reason = check_blacklist_logic(
        library_title,
        library_type,
        blacklist_library,
        blacklist_library_type,
        library_other,
    )

    assert skip_reason is None

    library_title = "Movies"
    library_type = "movies"
    library_other = search_mapping(library_mapping, library_title)
    skip_reason = check_blacklist_logic(
        library_title,
        library_type,
        blacklist_library,
        blacklist_library_type,
        library_other,
    )

    assert skip_reason is None


def test_check_whitelist_logic():
    # Fails
    library_title = "Shows"
    library_type = "episodes"
    library_other = search_mapping(library_mapping, library_title)
    skip_reason = check_whitelist_logic(
        library_title,
        library_type,
        whitelist_library,
        whitelist_library_type,
        library_other,
    )

    assert (
        skip_reason
        == "episodes is not in whitelist_library_type and Shows is not in whitelist_library"
    )

    library_title = "TV Shows"
    library_type = "episodes"
    library_other = search_mapping(library_mapping, library_title)
    skip_reason = check_whitelist_logic(
        library_title,
        library_type,
        whitelist_library,
        whitelist_library_type,
        library_other,
    )

    assert (
        skip_reason
        == "episodes is not in whitelist_library_type and TV Shows is not in whitelist_library"
    )

    # Passes
    library_title = "Movie"
    library_type = "movies"
    library_other = search_mapping(library_mapping, library_title)
    skip_reason = check_whitelist_logic(
        library_title,
        library_type,
        whitelist_library,
        whitelist_library_type,
        library_other,
    )

    assert skip_reason is None

    library_title = "Movies"
    library_type = "movies"
    library_other = search_mapping(library_mapping, library_title)
    skip_reason = check_whitelist_logic(
        library_title,
        library_type,
        whitelist_library,
        whitelist_library_type,
        library_other,
    )

    assert skip_reason is None
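These tests pass each library title through search_mapping before running the blacklist/whitelist checks, and the asserted messages show the lookup works in both directions ("TV Shows" resolves to "Shows" and vice versa). A minimal sketch of that two-way lookup, assuming the real helper in src.functions may be implemented differently:

# Sketch only: resolve a title to its mapped counterpart, checking the
# mapping in both key-to-value and value-to-key directions.
def search_mapping_sketch(mapping: dict[str, str], key: str) -> str | None:
    if key in mapping:
        return mapping[key]
    for k, v in mapping.items():
        if v == key:
            return k
    return None

# search_mapping_sketch({"Shows": "TV Shows"}, "Shows")    -> "TV Shows"
# search_mapping_sketch({"Shows": "TV Shows"}, "TV Shows") -> "Shows"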
@ -1,47 +0,0 @@
import sys
import os

# Get the directory where this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Get the parent directory of the current directory.
parent = os.path.dirname(current)

# Add the parent directory to sys.path so that src/ is importable.
sys.path.append(parent)

from src.main import setup_black_white_lists

def test_setup_black_white_lists():
    # Simple
    blacklist_library = 'library1, library2'
    whitelist_library = 'library1, library2'
    blacklist_library_type = 'library_type1, library_type2'
    whitelist_library_type = 'library_type1, library_type2'
    blacklist_users = 'user1, user2'
    whitelist_users = 'user1, user2'

    results_blacklist_library, return_whitelist_library, return_blacklist_library_type, return_whitelist_library_type, return_blacklist_users, return_whitelist_users = setup_black_white_lists(blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, blacklist_users, whitelist_users)

    assert results_blacklist_library == ['library1', 'library2']
    assert return_whitelist_library == ['library1', 'library2']
    assert return_blacklist_library_type == ['library_type1', 'library_type2']
    assert return_whitelist_library_type == ['library_type1', 'library_type2']
    assert return_blacklist_users == ['user1', 'user2']
    assert return_whitelist_users == ['user1', 'user2']

    # Library mapping and user mapping
    library_mapping = { "library1": "library3" }
    user_mapping = { "user1": "user3" }

    results_blacklist_library, return_whitelist_library, return_blacklist_library_type, return_whitelist_library_type, return_blacklist_users, return_whitelist_users = setup_black_white_lists(blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, blacklist_users, whitelist_users, library_mapping, user_mapping)

    assert results_blacklist_library == ['library1', 'library2', 'library3']
    assert return_whitelist_library == ['library1', 'library2', 'library3']
    assert return_blacklist_library_type == ['library_type1', 'library_type2']
    assert return_whitelist_library_type == ['library_type1', 'library_type2']
    assert return_blacklist_users == ['user1', 'user2', 'user3']
    assert return_whitelist_users == ['user1', 'user2', 'user3']
@ -1,176 +0,0 @@
import sys
import os

# Get the directory where this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Get the parent directory of the current directory.
parent = os.path.dirname(current)

# Add the parent directory to sys.path so that src/ is importable.
sys.path.append(parent)

from src.main import cleanup_watched

tv_shows_watched_list_1 = {
    frozenset({("tvdb", "75710"), ("title", "Criminal Minds"), ("imdb", "tt0452046"), ("locations", ("Criminal Minds",)), ("tmdb", "4057")}): {
        "Season 1": [
            {'imdb': 'tt0550489', 'tmdb': '282843', 'tvdb': '176357', 'locations': ('Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv',)},
            {'imdb': 'tt0550487', 'tmdb': '282861', 'tvdb': '300385', 'locations': ('Criminal Minds S01E02 Compulsion WEBDL-720p.mkv',)}
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [
            {'locations': ('Test S01E01.mkv',)},
            {'locations': ('Test S01E02.mkv',)}
        ]
    }
}

movies_watched_list_1 = [
    {"imdb":"tt2380307", "tmdb":"354912", 'title': 'Coco', 'locations': ('Coco (2017) Remux-1080p.mkv',)},
    {"tmdbcollection":"448150", "imdb":"tt1431045", "tmdb":"293660", 'title': 'Deadpool', 'locations': ('Deadpool (2016) Remux-1080p.mkv',)},
]

tv_shows_watched_list_2 = {
    frozenset({("tvdb", "75710"), ("title", "Criminal Minds"), ("imdb", "tt0452046"), ("locations", ("Criminal Minds",)), ("tmdb", "4057")}): {
        "Season 1": [
            {'imdb': 'tt0550487', 'tmdb': '282861', 'tvdb': '300385', 'locations': ('Criminal Minds S01E02 Compulsion WEBDL-720p.mkv',)},
            {'imdb': 'tt0550498', 'tmdb': '282865', 'tvdb': '300474', 'locations': ("Criminal Minds S01E03 Won't Get Fooled Again WEBDL-720p.mkv",)}
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [
            {'locations': ('Test S01E02.mkv',)},
            {'locations': ('Test S01E03.mkv',)}
        ]
    }
}

movies_watched_list_2 = [
    {"imdb":"tt2380307", "tmdb":"354912", 'title': 'Coco', 'locations': ('Coco (2017) Remux-1080p.mkv',)},
    {'imdb': 'tt0384793', 'tmdb': '9788', 'tvdb': '9103', 'title': 'Accepted', 'locations': ('Accepted (2006) Remux-1080p.mkv',)}
]

# Test to see if objects get deleted all the way up to the root.
tv_shows_2_watched_list_1 = {
    frozenset({("tvdb", "75710"), ("title", "Criminal Minds"), ("imdb", "tt0452046"), ("locations", ("Criminal Minds",)), ("tmdb", "4057")}): {
        "Season 1": [
            {'imdb': 'tt0550489', 'tmdb': '282843', 'tvdb': '176357', 'locations': ('Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv',)},
        ]
    }
}

expected_tv_show_watched_list_1 = {
    frozenset({("tvdb", "75710"), ("title", "Criminal Minds"), ("imdb", "tt0452046"), ("locations", ("Criminal Minds",)), ("tmdb", "4057")}): {
        "Season 1": [
            {'imdb': 'tt0550489', 'tmdb': '282843', 'tvdb': '176357', 'locations': ('Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv',)}
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [
            {'locations': ('Test S01E01.mkv',)}
        ]
    }
}

expected_movie_watched_list_1 = [
    {"tmdbcollection":"448150", "imdb":"tt1431045", "tmdb":"293660", 'title': 'Deadpool', 'locations': ('Deadpool (2016) Remux-1080p.mkv',)}
]

expected_tv_show_watched_list_2 = {
    frozenset({("tvdb", "75710"), ("title", "Criminal Minds"), ("imdb", "tt0452046"), ("locations", ("Criminal Minds",)), ("tmdb", "4057")}): {
        "Season 1": [
            {'imdb': 'tt0550498', 'tmdb': '282865', 'tvdb': '300474', 'locations': ("Criminal Minds S01E03 Won't Get Fooled Again WEBDL-720p.mkv",)}
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [
            {'locations': ('Test S01E03.mkv',)}
        ]
    }
}

expected_movie_watched_list_2 = [
    {'imdb': 'tt0384793', 'tmdb': '9788', 'tvdb': '9103', 'title': 'Accepted', 'locations': ('Accepted (2006) Remux-1080p.mkv',)}
]


def test_simple_cleanup_watched():
    user_watched_list_1 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_1,
            "Movies": movies_watched_list_1,
            "Other Shows": tv_shows_2_watched_list_1
        },
    }
    user_watched_list_2 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_2,
            "Movies": movies_watched_list_2,
            "Other Shows": tv_shows_2_watched_list_1
        }
    }

    expected_watched_list_1 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_1,
            "Movies": expected_movie_watched_list_1
        }
    }

    expected_watched_list_2 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_2,
            "Movies": expected_movie_watched_list_2
        }
    }

    return_watched_list_1 = cleanup_watched(user_watched_list_1, user_watched_list_2)
    return_watched_list_2 = cleanup_watched(user_watched_list_2, user_watched_list_1)

    assert return_watched_list_1 == expected_watched_list_1
    assert return_watched_list_2 == expected_watched_list_2


def test_mapping_cleanup_watched():
    user_watched_list_1 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_1,
            "Movies": movies_watched_list_1,
            "Other Shows": tv_shows_2_watched_list_1
        },
    }
    user_watched_list_2 = {
        "user2": {
            "Shows": tv_shows_watched_list_2,
            "Movies": movies_watched_list_2,
            "Other Shows": tv_shows_2_watched_list_1
        }
    }

    expected_watched_list_1 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_1,
            "Movies": expected_movie_watched_list_1
        }
    }

    expected_watched_list_2 = {
        "user2": {
            "Shows": expected_tv_show_watched_list_2,
            "Movies": expected_movie_watched_list_2
        }
    }

    user_mapping = { "user1": "user2" }
    library_mapping = { "TV Shows": "Shows" }

    return_watched_list_1 = cleanup_watched(user_watched_list_1, user_watched_list_2, user_mapping=user_mapping, library_mapping=library_mapping)
    return_watched_list_2 = cleanup_watched(user_watched_list_2, user_watched_list_1, user_mapping=user_mapping, library_mapping=library_mapping)

    assert return_watched_list_1 == expected_watched_list_1
    assert return_watched_list_2 == expected_watched_list_2
@ -0,0 +1,39 @@
import sys
import os

# Get the directory where this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Get the parent directory of the current directory.
parent = os.path.dirname(current)

# Add the parent directory to sys.path so that src/ is importable.
sys.path.append(parent)

from src.users import (
    combine_user_lists,
    filter_user_lists,
)


def test_combine_user_lists():
    server_1_users = ["test", "test3", "luigi311"]
    server_2_users = ["luigi311", "test2", "test3"]
    user_mapping = {"test2": "test"}

    combined = combine_user_lists(server_1_users, server_2_users, user_mapping)

    assert combined == {"luigi311": "luigi311", "test": "test2", "test3": "test3"}


def test_filter_user_lists():
    users = {"luigi311": "luigi311", "test": "test2", "test3": "test3"}
    blacklist_users = ["test3"]
    whitelist_users = ["test", "luigi311"]

    filtered = filter_user_lists(users, blacklist_users, whitelist_users)

    assert filtered == {"test": "test2", "luigi311": "luigi311"}
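From the two assertions, combine_user_lists appears to intersect the two servers' user lists while treating mapped names as the same account (keyed by the server-1 name), and filter_user_lists then applies the black/white lists to the combined mapping. A sketch consistent with those expectations; src.users is the authoritative implementation and these helper names are hypothetical:

# Sketch only: pair each server-1 user with its server-2 counterpart,
# consulting the user mapping in both directions.
def combine_user_lists_sketch(users_1, users_2, mapping):
    combined = {}
    for user in users_1:
        other = mapping.get(user) or next(
            (k for k, v in mapping.items() if v == user), None
        )
        if user in users_2:
            combined[user] = user
        elif other and other in users_2:
            combined[user] = other
    return combined

# Sketch only: drop blacklisted users and, if a whitelist is set, keep
# only users appearing in it under either server's name.
def filter_user_lists_sketch(users, blacklist, whitelist):
    return {
        k: v
        for k, v in users.items()
        if k not in blacklist and v not in blacklist
        and (not whitelist or k in whitelist or v in whitelist)
    }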
@ -0,0 +1,724 @@
from datetime import datetime
import sys
import os

# Get the directory where this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Get the parent directory of the current directory.
parent = os.path.dirname(current)

# Add the parent directory to sys.path so that src/ is importable.
sys.path.append(parent)

from src.watched import (
    LibraryData,
    MediaIdentifiers,
    MediaItem,
    Series,
    UserData,
    WatchedStatus,
    cleanup_watched,
)

viewed_date = datetime.today()

tv_shows_watched_list_1: list[Series] = [
    Series(
        identifiers=MediaIdentifiers(
            title="Doctor Who (2005)",
            locations=("Doctor Who (2005) {tvdb-78804} {imdb-tt0436992}",),
            imdb_id="tt0436992",
            tmdb_id="57243",
            tvdb_id="78804",
        ),
        episodes=[
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="The Unquiet Dead",
                    locations=("S01E03.mkv",),
                    imdb_id="tt0563001",
                    tmdb_id="968589",
                    tvdb_id="295296",
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Aliens of London (1)",
                    locations=("S01E04.mkv",),
                    imdb_id="tt0562985",
                    tmdb_id="968590",
                    tvdb_id="295297",
                ),
                status=WatchedStatus(
                    completed=False, time=240000, viewed_date=viewed_date
                ),
            ),
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="World War Three (2)",
                    locations=("S01E05.mkv",),
                    imdb_id="tt0563003",
                    tmdb_id="968592",
                    tvdb_id="295298",
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
        ],
    ),
    Series(
        identifiers=MediaIdentifiers(
            title="Monarch: Legacy of Monsters",
            locations=("Monarch - Legacy of Monsters {tvdb-422598} {imdb-tt17220216}",),
            imdb_id="tt17220216",
            tmdb_id="202411",
            tvdb_id="422598",
        ),
        episodes=[
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Secrets and Lies",
                    locations=("S01E03.mkv",),
                    imdb_id="tt21255044",
                    tmdb_id="4661246",
                    tvdb_id="10009418",
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Parallels and Interiors",
                    locations=("S01E04.mkv",),
                    imdb_id="tt21255050",
                    tmdb_id="4712059",
                    tvdb_id="10009419",
                ),
                status=WatchedStatus(
                    completed=False, time=240000, viewed_date=viewed_date
                ),
            ),
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="The Way Out",
                    locations=("S01E05.mkv",),
                    imdb_id="tt23787572",
                    tmdb_id="4712061",
                    tvdb_id="10009420",
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
        ],
    ),
    Series(
        identifiers=MediaIdentifiers(
            title="My Adventures with Superman",
            locations=("My Adventures with Superman {tvdb-403172} {imdb-tt14681924}",),
            imdb_id="tt14681924",
            tmdb_id="125928",
            tvdb_id="403172",
        ),
        episodes=[
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Adventures of a Normal Man (1)",
                    locations=("S01E01.mkv",),
                    imdb_id="tt15699926",
                    tmdb_id="3070048",
                    tvdb_id="8438181",
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Adventures of a Normal Man (2)",
                    locations=("S01E02.mkv",),
                    imdb_id="tt20413322",
                    tmdb_id="4568681",
                    tvdb_id="9829910",
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="My Interview with Superman",
                    locations=("S01E03.mkv",),
                    imdb_id="tt20413328",
                    tmdb_id="4497012",
                    tvdb_id="9870382",
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
        ],
    ),
]

# ─────────────────────────────────────────────────────────────
# TV Shows Watched list 2

tv_shows_watched_list_2: list[Series] = [
    Series(
        identifiers=MediaIdentifiers(
            title="Doctor Who",
            locations=("Doctor Who (2005) {tvdb-78804} {imdb-tt0436992}",),
            imdb_id="tt0436992",
            tmdb_id="57243",
            tvdb_id="78804",
        ),
        episodes=[
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Rose",
                    locations=("S01E01.mkv",),
                    imdb_id="tt0562992",
                    tvdb_id="295294",
                    tmdb_id=None,
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="The End of the World",
                    locations=("S01E02.mkv",),
                    imdb_id="tt0562997",
                    tvdb_id="295295",
                    tmdb_id=None,
                ),
                status=WatchedStatus(
                    completed=False, time=300670, viewed_date=viewed_date
                ),
            ),
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="World War Three (2)",
                    locations=("S01E05.mkv",),
                    imdb_id="tt0563003",
                    tvdb_id="295298",
                    tmdb_id=None,
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
        ],
    ),
    Series(
        identifiers=MediaIdentifiers(
            title="Monarch: Legacy of Monsters",
            locations=("Monarch - Legacy of Monsters {tvdb-422598} {imdb-tt17220216}",),
            imdb_id="tt17220216",
            tmdb_id="202411",
            tvdb_id="422598",
        ),
        episodes=[
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Aftermath",
                    locations=("S01E01.mkv",),
                    imdb_id="tt20412166",
                    tvdb_id="9959300",
                    tmdb_id=None,
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Departure",
                    locations=("S01E02.mkv",),
                    imdb_id="tt22866594",
                    tvdb_id="10009417",
                    tmdb_id=None,
                ),
                status=WatchedStatus(
                    completed=False, time=300741, viewed_date=viewed_date
                ),
            ),
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="The Way Out",
                    locations=("S01E05.mkv",),
                    imdb_id="tt23787572",
                    tvdb_id="10009420",
                    tmdb_id=None,
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
        ],
    ),
    Series(
        identifiers=MediaIdentifiers(
            title="My Adventures with Superman",
            locations=("My Adventures with Superman {tvdb-403172} {imdb-tt14681924}",),
            imdb_id="tt14681924",
            tmdb_id="125928",
            tvdb_id="403172",
        ),
        episodes=[
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Adventures of a Normal Man (1)",
                    locations=("S01E01.mkv",),
                    imdb_id="tt15699926",
                    tvdb_id="8438181",
                    tmdb_id=None,
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Adventures of a Normal Man (2)",
                    locations=("S01E02.mkv",),
                    imdb_id="tt20413322",
                    tvdb_id="9829910",
                    tmdb_id=None,
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="My Interview with Superman",
                    locations=("S01E03.mkv",),
                    imdb_id="tt20413328",
                    tvdb_id="9870382",
                    tmdb_id=None,
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
        ],
    ),
]

# ─────────────────────────────────────────────────────────────
# Expected TV Shows Watched list 1 (after cleanup)

expected_tv_show_watched_list_1: list[Series] = [
    Series(
        identifiers=MediaIdentifiers(
            title="Doctor Who (2005)",
            locations=("Doctor Who (2005) {tvdb-78804} {imdb-tt0436992}",),
            imdb_id="tt0436992",
            tmdb_id="57243",
            tvdb_id="78804",
        ),
        episodes=[
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="The Unquiet Dead",
                    locations=("S01E03.mkv",),
                    imdb_id="tt0563001",
                    tmdb_id="968589",
                    tvdb_id="295296",
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Aliens of London (1)",
                    locations=("S01E04.mkv",),
                    imdb_id="tt0562985",
                    tmdb_id="968590",
                    tvdb_id="295297",
                ),
                status=WatchedStatus(
                    completed=False, time=240000, viewed_date=viewed_date
                ),
            ),
        ],
    ),
    Series(
        identifiers=MediaIdentifiers(
            title="Monarch: Legacy of Monsters",
            locations=("Monarch - Legacy of Monsters {tvdb-422598} {imdb-tt17220216}",),
            imdb_id="tt17220216",
            tmdb_id="202411",
            tvdb_id="422598",
        ),
        episodes=[
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Secrets and Lies",
                    locations=("S01E03.mkv",),
                    imdb_id="tt21255044",
                    tmdb_id="4661246",
                    tvdb_id="10009418",
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Parallels and Interiors",
                    locations=("S01E04.mkv",),
                    imdb_id="tt21255050",
                    tmdb_id="4712059",
                    tvdb_id="10009419",
                ),
                status=WatchedStatus(
                    completed=False, time=240000, viewed_date=viewed_date
                ),
            ),
        ],
    ),
]

# ─────────────────────────────────────────────────────────────
# Expected TV Shows Watched list 2 (after cleanup)

expected_tv_show_watched_list_2: list[Series] = [
    Series(
        identifiers=MediaIdentifiers(
            title="Doctor Who",
            locations=("Doctor Who (2005) {tvdb-78804} {imdb-tt0436992}",),
            imdb_id="tt0436992",
            tmdb_id="57243",
            tvdb_id="78804",
        ),
        episodes=[
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Rose",
                    locations=("S01E01.mkv",),
                    imdb_id="tt0562992",
                    tvdb_id="295294",
                    tmdb_id=None,
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="The End of the World",
                    locations=("S01E02.mkv",),
                    imdb_id="tt0562997",
                    tvdb_id="295295",
                    tmdb_id=None,
                ),
                status=WatchedStatus(
                    completed=False, time=300670, viewed_date=viewed_date
                ),
            ),
        ],
    ),
    Series(
        identifiers=MediaIdentifiers(
            title="Monarch: Legacy of Monsters",
            locations=("Monarch - Legacy of Monsters {tvdb-422598} {imdb-tt17220216}",),
            imdb_id="tt17220216",
            tmdb_id="202411",
            tvdb_id="422598",
        ),
        episodes=[
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Aftermath",
                    locations=("S01E01.mkv",),
                    imdb_id="tt20412166",
                    tvdb_id="9959300",
                    tmdb_id=None,
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            ),
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Departure",
                    locations=("S01E02.mkv",),
                    imdb_id="tt22866594",
                    tvdb_id="10009417",
                    tmdb_id=None,
                ),
                status=WatchedStatus(
                    completed=False, time=300741, viewed_date=viewed_date
                ),
            ),
        ],
    ),
]

# ─────────────────────────────────────────────────────────────
# Movies Watched list 1

movies_watched_list_1: list[MediaItem] = [
    MediaItem(
        identifiers=MediaIdentifiers(
            title="Big Buck Bunny",
            locations=("Big Buck Bunny.mkv",),
            imdb_id="tt1254207",
            tmdb_id="10378",
            tvdb_id="12352",
        ),
        status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
    ),
    MediaItem(
        identifiers=MediaIdentifiers(
            title="The Family Plan",
            locations=("The Family Plan (2023).mkv",),
            imdb_id="tt16431870",
            tmdb_id="1029575",
            tvdb_id="351194",
        ),
        status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
    ),
    MediaItem(
        identifiers=MediaIdentifiers(
            title="Killers of the Flower Moon",
            locations=("Killers of the Flower Moon (2023).mkv",),
            imdb_id="tt5537002",
            tmdb_id="466420",
            tvdb_id="135852",
        ),
        status=WatchedStatus(completed=False, time=240000, viewed_date=viewed_date),
    ),
]

# ─────────────────────────────────────────────────────────────
# Movies Watched list 2

movies_watched_list_2: list[MediaItem] = [
    MediaItem(
        identifiers=MediaIdentifiers(
            title="The Family Plan",
            locations=("The Family Plan (2023).mkv",),
            imdb_id="tt16431870",
            tmdb_id="1029575",
            tvdb_id=None,
        ),
        status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
    ),
    MediaItem(
        identifiers=MediaIdentifiers(
            title="Five Nights at Freddy's",
            locations=("Five Nights at Freddy's (2023).mkv",),
            imdb_id="tt4589218",
            tmdb_id="507089",
            tvdb_id=None,
        ),
        status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
    ),
    MediaItem(
        identifiers=MediaIdentifiers(
            title="The Hunger Games: The Ballad of Songbirds & Snakes",
            locations=("The Hunger Games The Ballad of Songbirds & Snakes (2023).mkv",),
            imdb_id="tt10545296",
            tmdb_id="695721",
            tvdb_id=None,
        ),
        status=WatchedStatus(completed=False, time=301215, viewed_date=viewed_date),
    ),
]

# ─────────────────────────────────────────────────────────────
# Expected Movies Watched list 1

expected_movie_watched_list_1: list[MediaItem] = [
    MediaItem(
        identifiers=MediaIdentifiers(
            title="Big Buck Bunny",
            locations=("Big Buck Bunny.mkv",),
            imdb_id="tt1254207",
            tmdb_id="10378",
            tvdb_id="12352",
        ),
        status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
    ),
    MediaItem(
        identifiers=MediaIdentifiers(
            title="Killers of the Flower Moon",
            locations=("Killers of the Flower Moon (2023).mkv",),
            imdb_id="tt5537002",
            tmdb_id="466420",
            tvdb_id="135852",
        ),
        status=WatchedStatus(completed=False, time=240000, viewed_date=viewed_date),
    ),
]

# ─────────────────────────────────────────────────────────────
# Expected Movies Watched list 2

expected_movie_watched_list_2: list[MediaItem] = [
    MediaItem(
        identifiers=MediaIdentifiers(
            title="Five Nights at Freddy's",
            locations=("Five Nights at Freddy's (2023).mkv",),
            imdb_id="tt4589218",
            tmdb_id="507089",
            tvdb_id=None,
        ),
        status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
    ),
    MediaItem(
        identifiers=MediaIdentifiers(
            title="The Hunger Games: The Ballad of Songbirds & Snakes",
            locations=("The Hunger Games The Ballad of Songbirds & Snakes (2023).mkv",),
            imdb_id="tt10545296",
            tmdb_id="695721",
            tvdb_id=None,
        ),
        status=WatchedStatus(completed=False, time=301215, viewed_date=viewed_date),
    ),
]

# ─────────────────────────────────────────────────────────────
# TV Shows 2 Watched list 1 (for testing deletion up to the root)
# Here we use a single Series entry for "Criminal Minds"

tv_shows_2_watched_list_1: list[Series] = [
    Series(
        identifiers=MediaIdentifiers(
            title="Criminal Minds",
            locations=("Criminal Minds",),
            imdb_id="tt0452046",
            tmdb_id="4057",
            tvdb_id="75710",
        ),
        episodes=[
            MediaItem(
                identifiers=MediaIdentifiers(
                    title="Extreme Aggressor",
                    locations=(
                        "Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",
                    ),
                    imdb_id="tt0550489",
                    tmdb_id="282843",
                    tvdb_id="176357",
                ),
                status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
            )
        ],
    )
]


def test_simple_cleanup_watched():
    user_watched_list_1: dict[str, UserData] = {
        "user1": UserData(
            libraries={
                "TV Shows": LibraryData(
                    title="TV Shows",
                    movies=[],
                    series=tv_shows_watched_list_1,
                ),
                "Movies": LibraryData(
                    title="Movies",
                    movies=movies_watched_list_1,
                    series=[],
                ),
                "Other Shows": LibraryData(
                    title="Other Shows",
                    movies=[],
                    series=tv_shows_2_watched_list_1,
                ),
            }
        )
    }

    user_watched_list_2: dict[str, UserData] = {
        "user1": UserData(
            libraries={
                "TV Shows": LibraryData(
                    title="TV Shows",
                    movies=[],
                    series=tv_shows_watched_list_2,
                ),
                "Movies": LibraryData(
                    title="Movies",
                    movies=movies_watched_list_2,
                    series=[],
                ),
                "Other Shows": LibraryData(
                    title="Other Shows",
                    movies=[],
                    series=tv_shows_2_watched_list_1,
                ),
            }
        )
    }

    expected_watched_list_1: dict[str, UserData] = {
        "user1": UserData(
            libraries={
                "TV Shows": LibraryData(
                    title="TV Shows",
                    movies=[],
                    series=expected_tv_show_watched_list_1,
                ),
                "Movies": LibraryData(
                    title="Movies",
                    movies=expected_movie_watched_list_1,
                    series=[],
                ),
            }
        )
    }

    expected_watched_list_2: dict[str, UserData] = {
        "user1": UserData(
            libraries={
                "TV Shows": LibraryData(
                    title="TV Shows",
                    movies=[],
                    series=expected_tv_show_watched_list_2,
                ),
                "Movies": LibraryData(
                    title="Movies",
                    movies=expected_movie_watched_list_2,
                    series=[],
                ),
            }
        )
    }

    return_watched_list_1 = cleanup_watched(user_watched_list_1, user_watched_list_2)
    return_watched_list_2 = cleanup_watched(user_watched_list_2, user_watched_list_1)

    assert return_watched_list_1 == expected_watched_list_1
    assert return_watched_list_2 == expected_watched_list_2


# def test_mapping_cleanup_watched():
#     user_watched_list_1 = {
#         "user1": {
#             "TV Shows": tv_shows_watched_list_1,
#             "Movies": movies_watched_list_1,
#             "Other Shows": tv_shows_2_watched_list_1,
#         },
#     }
#     user_watched_list_2 = {
#         "user2": {
#             "Shows": tv_shows_watched_list_2,
#             "Movies": movies_watched_list_2,
#             "Other Shows": tv_shows_2_watched_list_1,
#         }
#     }
#
#     expected_watched_list_1 = {
#         "user1": {
#             "TV Shows": expected_tv_show_watched_list_1,
#             "Movies": expected_movie_watched_list_1,
#         }
#     }
#
#     expected_watched_list_2 = {
#         "user2": {
#             "Shows": expected_tv_show_watched_list_2,
#             "Movies": expected_movie_watched_list_2,
#         }
#     }
#
#     user_mapping = {"user1": "user2"}
#     library_mapping = {"TV Shows": "Shows"}
#
#     return_watched_list_1 = cleanup_watched(
#         user_watched_list_1,
#         user_watched_list_2,
#         user_mapping=user_mapping,
#         library_mapping=library_mapping,
#     )
#     return_watched_list_2 = cleanup_watched(
#         user_watched_list_2,
#         user_watched_list_1,
#         user_mapping=user_mapping,
#         library_mapping=library_mapping,
#     )
#
#     assert return_watched_list_1 == expected_watched_list_1
#     assert return_watched_list_2 == expected_watched_list_2
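The expected lists above encode cleanup_watched's contract: each user's watched state is returned with everything the other server has already seen removed, so only the differences remain to be synced, and a library whose contents become empty ("Other Shows") drops out of the result entirely. A toy sketch of the underlying set-difference idea, deliberately ignoring the GUID/location matching and nested Series/episode structure the real function handles:

# Sketch only: keep entries of list 1 that the other server has not seen.
def diff_watched(watched_1: list[str], watched_2: list[str]) -> list[str]:
    seen = set(watched_2)
    return [item for item in watched_1 if item not in seen]

# diff_watched(["Coco", "Deadpool"], ["Coco", "Accepted"]) -> ["Deadpool"]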
@ -0,0 +1,217 @@
import argparse
import os
import sys
from loguru import logger
from collections import Counter


class MarkLogError(Exception):
    """Custom exception for mark.log validation failures."""

    pass


def parse_args():
    parser = argparse.ArgumentParser(
        description="Check the mark.log file that is generated by the CI to make sure it contains the expected values"
    )
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument(
        "--guids", action="store_true", help="Check the mark.log file for guids"
    )
    group.add_argument(
        "--locations", action="store_true", help="Check the mark.log file for locations"
    )
    group.add_argument(
        "--write", action="store_true", help="Check the mark.log file for write-run"
    )
    group.add_argument(
        "--plex", action="store_true", help="Check the mark.log file for Plex"
    )
    group.add_argument(
        "--jellyfin", action="store_true", help="Check the mark.log file for Jellyfin"
    )
    group.add_argument(
        "--emby", action="store_true", help="Check the mark.log file for Emby"
    )

    return parser.parse_args()


def read_marklog():
    marklog = os.path.join(os.getcwd(), "mark.log")
    try:
        with open(marklog, "r") as f:
            lines = [line.strip() for line in f if line.strip()]
        return lines
    except Exception as e:
        raise MarkLogError(f"Error reading {marklog}: {e}")


def check_marklog(lines, expected_values):
    found_counter = Counter(lines)
    expected_counter = Counter(expected_values)

    # Determine missing and extra items by comparing counts
    missing = expected_counter - found_counter
    extra = found_counter - expected_counter

    if missing or extra:
        if missing:
            logger.error("Missing expected entries (with counts):")
            for entry, count in missing.items():
                logger.error(f"  {entry}: missing {count} time(s)")
        if extra:
            logger.error("Unexpected extra entries found (with counts):")
            for entry, count in extra.items():
                logger.error(f"  {entry}: found {count} extra time(s)")

        logger.error(
            f"Entry count mismatch: found {len(lines)} entries, expected {len(expected_values)} entries."
        )
        logger.error("Full mark.log content:")
        for line in sorted(lines):
            logger.error(f"  {line}")
        raise MarkLogError("mark.log validation failed.")

    return True


def main():
    args = parse_args()

    # Expected values defined for each check
    expected_jellyfin = [
        "Plex/JellyPlex-CI/jellyplex_watched/Custom Movies/Movie Two (2021)",
        "Plex/JellyPlex-CI/jellyplex_watched/Custom TV Shows/Greatest Show Ever 3000/Episode 2",
        "Plex/JellyPlex-CI/jellyplex_watched/Movies/Five Nights at Freddy's",
        "Plex/JellyPlex-CI/jellyplex_watched/Movies/The Hunger Games: The Ballad of Songbirds & Snakes/301215",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Doctor Who (2005)/Rose",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Doctor Who (2005)/The End of the World/300670",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Aftermath",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Departure/300741",
        "Emby/Emby-Server/jellyplex_watched/Custom Movies/Movie Two",
        "Emby/Emby-Server/jellyplex_watched/Custom TV Shows/Greatest Show Ever (3000)/S01E02",
        "Emby/Emby-Server/jellyplex_watched/Movies/The Family Plan",
        "Emby/Emby-Server/jellyplex_watched/Movies/Five Nights at Freddy's",
        "Emby/Emby-Server/jellyplex_watched/Movies/The Hunger Games: The Ballad of Songbirds & Snakes/5",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/Rose",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/The End of the World/5",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Departure/5",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/The Way Out",
    ]
    expected_emby = [
        "Plex/JellyPlex-CI/jellyplex_watched/Custom Movies/Movie Three (2022)",
        "Plex/JellyPlex-CI/jellyplex_watched/Custom TV Shows/Greatest Show Ever 3000/Episode 3",
        "Plex/JellyPlex-CI/jellyplex_watched/Movies/Tears of Steel",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Aftermath",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Parallels and Interiors/240429",
        "Jellyfin/Jellyfin-Server/JellyUser/Custom Movies/Movie Three (2022)",
        "Jellyfin/Jellyfin-Server/JellyUser/Custom TV Shows/Greatest Show Ever (3000)/S01E03",
        "Jellyfin/Jellyfin-Server/JellyUser/Movies/Tears of Steel",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Parallels and Interiors/4",
    ]
    expected_plex = [
        "Jellyfin/Jellyfin-Server/JellyUser/Movies/Big Buck Bunny",
        "Jellyfin/Jellyfin-Server/JellyUser/Movies/Killers of the Flower Moon/4",
        "Jellyfin/Jellyfin-Server/JellyUser/Custom TV Shows/Greatest Show Ever (3000)/S01E01",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Doctor Who/The Unquiet Dead",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Doctor Who/Aliens of London (1)/4",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Secrets and Lies",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Parallels and Interiors/4",
        "Jellyfin/Jellyfin-Server/JellyUser/Custom Movies/Movie One (2020)",
        "Emby/Emby-Server/jellyplex_watched/Movies/Big Buck Bunny",
        "Emby/Emby-Server/jellyplex_watched/Movies/The Family Plan",
        "Emby/Emby-Server/jellyplex_watched/Movies/Killers of the Flower Moon/4",
        "Emby/Emby-Server/jellyplex_watched/Custom TV Shows/Greatest Show Ever (3000)/S01E01",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/The Unquiet Dead",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/Aliens of London (1)/4",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Secrets and Lies",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/The Way Out",
        "Emby/Emby-Server/jellyplex_watched/Custom Movies/Movie One",
    ]

    expected_locations = expected_emby + expected_plex + expected_jellyfin
    # Remove Custom Movies/TV Shows as they should not have guids
    expected_guids = [item for item in expected_locations if "Custom" not in item]

    expected_write = [
        "Plex/JellyPlex-CI/jellyplex_watched/Custom Movies/Movie Two (2021)",
        "Plex/JellyPlex-CI/jellyplex_watched/Custom TV Shows/Greatest Show Ever 3000/Episode 2",
        "Plex/JellyPlex-CI/jellyplex_watched/Movies/Five Nights at Freddy's",
        "Plex/JellyPlex-CI/jellyplex_watched/Movies/The Hunger Games: The Ballad of Songbirds & Snakes/301215",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Doctor Who (2005)/Rose",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Doctor Who (2005)/The End of the World/300670",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Aftermath",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Departure/300741",
        "Jellyfin/Jellyfin-Server/JellyUser/Movies/Big Buck Bunny",
        "Jellyfin/Jellyfin-Server/JellyUser/Movies/Killers of the Flower Moon/4",
        "Jellyfin/Jellyfin-Server/JellyUser/Custom TV Shows/Greatest Show Ever (3000)/S01E01",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Doctor Who/The Unquiet Dead",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Doctor Who/Aliens of London (1)/4",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Secrets and Lies",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Parallels and Interiors/4",
        "Jellyfin/Jellyfin-Server/JellyUser/Custom Movies/Movie One (2020)",
        "Plex/JellyPlex-CI/jellyplex_watched/Custom Movies/Movie Three (2022)",
        "Plex/JellyPlex-CI/jellyplex_watched/Custom TV Shows/Greatest Show Ever 3000/Episode 3",
        "Plex/JellyPlex-CI/jellyplex_watched/Movies/Tears of Steel",
        "Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Parallels and Interiors/240429",
        "Emby/Emby-Server/jellyplex_watched/Movies/Big Buck Bunny",
        "Emby/Emby-Server/jellyplex_watched/Movies/The Family Plan",
        "Emby/Emby-Server/jellyplex_watched/Movies/Five Nights at Freddy's",
        "Emby/Emby-Server/jellyplex_watched/Movies/The Hunger Games: The Ballad of Songbirds & Snakes/5",
        "Emby/Emby-Server/jellyplex_watched/Movies/Killers of the Flower Moon/4",
        "Emby/Emby-Server/jellyplex_watched/Custom TV Shows/Greatest Show Ever (3000)/S01E01",
        "Emby/Emby-Server/jellyplex_watched/Custom TV Shows/Greatest Show Ever (3000)/S01E02",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/Rose",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/The End of the World/5",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/The Unquiet Dead",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/Aliens of London (1)/4",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Departure/5",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Secrets and Lies",
        "Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/The Way Out",
        "Emby/Emby-Server/jellyplex_watched/Custom Movies/Movie One",
        "Emby/Emby-Server/jellyplex_watched/Custom Movies/Movie Two",
        "Jellyfin/Jellyfin-Server/JellyUser/Custom Movies/Movie Three (2022)",
        "Jellyfin/Jellyfin-Server/JellyUser/Custom TV Shows/Greatest Show Ever (3000)/S01E03",
        "Jellyfin/Jellyfin-Server/JellyUser/Movies/Tears of Steel",
        "Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Parallels and Interiors/4",
    ]

    # Determine which expected values to use based on the command-line flag
    if args.guids:
        expected_values = expected_guids
        check_type = "GUIDs"
    elif args.locations:
        expected_values = expected_locations
        check_type = "locations"
    elif args.write:
        expected_values = expected_write
        check_type = "write-run"
    elif args.plex:
        expected_values = expected_plex
        check_type = "Plex"
    elif args.jellyfin:
        expected_values = expected_jellyfin
        check_type = "Jellyfin"
    elif args.emby:
        expected_values = expected_emby
|
||||
check_type = "Emby"
|
||||
else:
|
||||
raise MarkLogError("No server specified")
|
||||
|
||||
logger.info(f"Validating mark.log for {check_type}...")
|
||||
|
||||
try:
|
||||
lines = read_marklog()
|
||||
check_marklog(lines, expected_values)
|
||||
except MarkLogError as e:
|
||||
logger.error(e)
|
||||
sys.exit(1)
|
||||
|
||||
logger.success("Successfully validated mark.log")
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
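# Usage sketch (hypothetical invocation; the script's real name and its
# argparse wiring are defined earlier in this file, so only the flags handled
# by the dispatch above are shown):
#   python <this-script> --guids      # validate the GUID entries in mark.log
#   python <this-script> --write      # validate entries produced by a write run
#   python <this-script> --emby       # validate only the Emby entries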
uv.lock
@@ -0,0 +1,407 @@
version = 1
revision = 2
requires-python = ">=3.12"

[[package]]
name = "annotated-types"
version = "0.7.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
]

[[package]]
name = "certifi"
version = "2025.8.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" },
]

[[package]]
name = "charset-normalizer"
version = "3.4.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" },
    { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" },
    { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" },
    { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" },
    { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" },
    { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" },
    { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" },
    { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" },
    { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" },
    { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" },
    { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" },
    { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" },
    { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" },
    { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" },
    { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" },
    { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" },
    { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" },
    { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" },
    { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" },
    { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" },
    { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" },
    { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" },
    { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" },
    { url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" },
    { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" },
    { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" },
    { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" },
    { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" },
    { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" },
    { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" },
    { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" },
    { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" },
    { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" },
    { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" },
]

[[package]]
name = "colorama"
version = "0.4.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
]

[[package]]
name = "idna"
version = "3.10"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
]

[[package]]
name = "iniconfig"
version = "2.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
]

[[package]]
name = "jellyplex-watched"
version = "8.3.0"
source = { virtual = "." }
dependencies = [
    { name = "loguru" },
    { name = "packaging" },
    { name = "plexapi" },
    { name = "pydantic" },
    { name = "python-dotenv" },
    { name = "requests" },
]

[package.dev-dependencies]
dev = [
    { name = "mypy" },
    { name = "pytest" },
    { name = "types-requests" },
]
lint = [
    { name = "ruff" },
]

[package.metadata]
requires-dist = [
    { name = "loguru", specifier = ">=0.7.3" },
    { name = "packaging", specifier = "==25.0" },
    { name = "plexapi", specifier = "==4.17.1" },
    { name = "pydantic", specifier = "==2.11.7" },
    { name = "python-dotenv", specifier = "==1.1.1" },
    { name = "requests", specifier = "==2.32.5" },
]

[package.metadata.requires-dev]
dev = [
    { name = "mypy", specifier = ">=1.16.1" },
    { name = "pytest", specifier = ">=8.4.1" },
    { name = "types-requests", specifier = ">=2.32.0.20250611" },
]
lint = [{ name = "ruff", specifier = ">=0.12.3" }]

[[package]]
name = "loguru"
version = "0.7.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "colorama", marker = "sys_platform == 'win32'" },
    { name = "win32-setctime", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" },
]

[[package]]
name = "mypy"
version = "1.18.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "mypy-extensions" },
    { name = "pathspec" },
    { name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/14/a3/931e09fc02d7ba96da65266884da4e4a8806adcdb8a57faaacc6edf1d538/mypy-1.18.1.tar.gz", hash = "sha256:9e988c64ad3ac5987f43f5154f884747faf62141b7f842e87465b45299eea5a9", size = 3448447, upload-time = "2025-09-11T23:00:47.067Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/e7/14/1c3f54d606cb88a55d1567153ef3a8bc7b74702f2ff5eb64d0994f9e49cb/mypy-1.18.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:502cde8896be8e638588b90fdcb4c5d5b8c1b004dfc63fd5604a973547367bb9", size = 12911082, upload-time = "2025-09-11T23:00:41.465Z" },
    { url = "https://files.pythonhosted.org/packages/90/83/235606c8b6d50a8eba99773add907ce1d41c068edb523f81eb0d01603a83/mypy-1.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7509549b5e41be279afc1228242d0e397f1af2919a8f2877ad542b199dc4083e", size = 11919107, upload-time = "2025-09-11T22:58:40.903Z" },
    { url = "https://files.pythonhosted.org/packages/ca/25/4e2ce00f8d15b99d0c68a2536ad63e9eac033f723439ef80290ec32c1ff5/mypy-1.18.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5956ecaabb3a245e3f34100172abca1507be687377fe20e24d6a7557e07080e2", size = 12472551, upload-time = "2025-09-11T22:58:37.272Z" },
    { url = "https://files.pythonhosted.org/packages/32/bb/92642a9350fc339dd9dcefcf6862d171b52294af107d521dce075f32f298/mypy-1.18.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8750ceb014a96c9890421c83f0db53b0f3b8633e2864c6f9bc0a8e93951ed18d", size = 13340554, upload-time = "2025-09-11T22:59:38.756Z" },
    { url = "https://files.pythonhosted.org/packages/cd/ee/38d01db91c198fb6350025d28f9719ecf3c8f2c55a0094bfbf3ef478cc9a/mypy-1.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fb89ea08ff41adf59476b235293679a6eb53a7b9400f6256272fb6029bec3ce5", size = 13530933, upload-time = "2025-09-11T22:59:20.228Z" },
    { url = "https://files.pythonhosted.org/packages/da/8d/6d991ae631f80d58edbf9d7066e3f2a96e479dca955d9a968cd6e90850a3/mypy-1.18.1-cp312-cp312-win_amd64.whl", hash = "sha256:2657654d82fcd2a87e02a33e0d23001789a554059bbf34702d623dafe353eabf", size = 9828426, upload-time = "2025-09-11T23:00:21.007Z" },
    { url = "https://files.pythonhosted.org/packages/e4/ec/ef4a7260e1460a3071628a9277a7579e7da1b071bc134ebe909323f2fbc7/mypy-1.18.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d70d2b5baf9b9a20bc9c730015615ae3243ef47fb4a58ad7b31c3e0a59b5ef1f", size = 12918671, upload-time = "2025-09-11T22:58:29.814Z" },
    { url = "https://files.pythonhosted.org/packages/a1/82/0ea6c3953f16223f0b8eda40c1aeac6bd266d15f4902556ae6e91f6fca4c/mypy-1.18.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b8367e33506300f07a43012fc546402f283c3f8bcff1dc338636affb710154ce", size = 11913023, upload-time = "2025-09-11T23:00:29.049Z" },
    { url = "https://files.pythonhosted.org/packages/ae/ef/5e2057e692c2690fc27b3ed0a4dbde4388330c32e2576a23f0302bc8358d/mypy-1.18.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:913f668ec50c3337b89df22f973c1c8f0b29ee9e290a8b7fe01cc1ef7446d42e", size = 12473355, upload-time = "2025-09-11T23:00:04.544Z" },
    { url = "https://files.pythonhosted.org/packages/98/43/b7e429fc4be10e390a167b0cd1810d41cb4e4add4ae50bab96faff695a3b/mypy-1.18.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a0e70b87eb27b33209fa4792b051c6947976f6ab829daa83819df5f58330c71", size = 13346944, upload-time = "2025-09-11T22:58:23.024Z" },
    { url = "https://files.pythonhosted.org/packages/89/4e/899dba0bfe36bbd5b7c52e597de4cf47b5053d337b6d201a30e3798e77a6/mypy-1.18.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c378d946e8a60be6b6ede48c878d145546fb42aad61df998c056ec151bf6c746", size = 13512574, upload-time = "2025-09-11T22:59:52.152Z" },
    { url = "https://files.pythonhosted.org/packages/f5/f8/7661021a5b0e501b76440454d786b0f01bb05d5c4b125fcbda02023d0250/mypy-1.18.1-cp313-cp313-win_amd64.whl", hash = "sha256:2cd2c1e0f3a7465f22731987fff6fc427e3dcbb4ca5f7db5bbeaff2ff9a31f6d", size = 9837684, upload-time = "2025-09-11T22:58:44.454Z" },
    { url = "https://files.pythonhosted.org/packages/bf/87/7b173981466219eccc64c107cf8e5ab9eb39cc304b4c07df8e7881533e4f/mypy-1.18.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ba24603c58e34dd5b096dfad792d87b304fc6470cbb1c22fd64e7ebd17edcc61", size = 12900265, upload-time = "2025-09-11T22:59:03.4Z" },
    { url = "https://files.pythonhosted.org/packages/ae/cc/b10e65bae75b18a5ac8f81b1e8e5867677e418f0dd2c83b8e2de9ba96ebd/mypy-1.18.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ed36662fb92ae4cb3cacc682ec6656208f323bbc23d4b08d091eecfc0863d4b5", size = 11942890, upload-time = "2025-09-11T23:00:00.607Z" },
    { url = "https://files.pythonhosted.org/packages/39/d4/aeefa07c44d09f4c2102e525e2031bc066d12e5351f66b8a83719671004d/mypy-1.18.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:040ecc95e026f71a9ad7956fea2724466602b561e6a25c2e5584160d3833aaa8", size = 12472291, upload-time = "2025-09-11T22:59:43.425Z" },
    { url = "https://files.pythonhosted.org/packages/c6/07/711e78668ff8e365f8c19735594ea95938bff3639a4c46a905e3ed8ff2d6/mypy-1.18.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:937e3ed86cb731276706e46e03512547e43c391a13f363e08d0fee49a7c38a0d", size = 13318610, upload-time = "2025-09-11T23:00:17.604Z" },
    { url = "https://files.pythonhosted.org/packages/ca/85/df3b2d39339c31d360ce299b418c55e8194ef3205284739b64962f6074e7/mypy-1.18.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1f95cc4f01c0f1701ca3b0355792bccec13ecb2ec1c469e5b85a6ef398398b1d", size = 13513697, upload-time = "2025-09-11T22:58:59.534Z" },
    { url = "https://files.pythonhosted.org/packages/b1/df/462866163c99ea73bb28f0eb4d415c087e30de5d36ee0f5429d42e28689b/mypy-1.18.1-cp314-cp314-win_amd64.whl", hash = "sha256:e4f16c0019d48941220ac60b893615be2f63afedaba6a0801bdcd041b96991ce", size = 9985739, upload-time = "2025-09-11T22:58:51.644Z" },
    { url = "https://files.pythonhosted.org/packages/e0/1d/4b97d3089b48ef3d904c9ca69fab044475bd03245d878f5f0b3ea1daf7ce/mypy-1.18.1-py3-none-any.whl", hash = "sha256:b76a4de66a0ac01da1be14ecc8ae88ddea33b8380284a9e3eae39d57ebcbe26e", size = 2352212, upload-time = "2025-09-11T22:59:26.576Z" },
]

[[package]]
name = "mypy-extensions"
version = "1.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" },
]

[[package]]
name = "packaging"
version = "25.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
]

[[package]]
name = "pathspec"
version = "0.12.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" },
]

[[package]]
name = "plexapi"
version = "4.17.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/2a/02/1bebd67c3cd94a45f6c3520da971791b66457535c9771d8e0068746d7bc2/plexapi-4.17.1.tar.gz", hash = "sha256:1e5bfb486bb150e058a80ff4fb9aff9e3efce644c56d52bb5297272e005d8241", size = 154746, upload-time = "2025-08-26T00:11:02.819Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/c3/1c/9fdaa0e1f797dde3c3cb56d7b222109009f70380e7f49fc0ff42d5705409/plexapi-4.17.1-py3-none-any.whl", hash = "sha256:9d51adb112a2b0b7aa91a928c8b5c0dfffc0d51108cea67d86fea08cee06c998", size = 166861, upload-time = "2025-08-26T00:11:00.89Z" },
]

[[package]]
name = "pluggy"
version = "1.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
]

[[package]]
name = "pydantic"
version = "2.11.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "annotated-types" },
    { name = "pydantic-core" },
    { name = "typing-extensions" },
    { name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" },
]

[[package]]
name = "pydantic-core"
version = "2.33.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" },
    { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" },
    { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" },
    { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" },
    { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" },
    { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" },
    { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" },
    { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" },
    { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" },
    { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" },
    { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" },
    { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" },
    { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" },
    { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" },
    { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" },
    { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" },
    { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" },
    { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" },
    { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" },
    { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" },
    { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" },
    { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" },
    { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" },
    { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" },
    { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" },
    { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" },
    { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" },
    { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" },
    { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" },
    { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" },
    { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" },
]

[[package]]
name = "pygments"
version = "2.19.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
]

[[package]]
name = "pytest"
version = "8.4.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "colorama", marker = "sys_platform == 'win32'" },
    { name = "iniconfig" },
    { name = "packaging" },
    { name = "pluggy" },
    { name = "pygments" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" },
]

[[package]]
name = "python-dotenv"
version = "1.1.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" },
]

[[package]]
name = "requests"
version = "2.32.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "certifi" },
    { name = "charset-normalizer" },
    { name = "idna" },
    { name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
]

[[package]]
name = "ruff"
version = "0.13.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6e/1a/1f4b722862840295bcaba8c9e5261572347509548faaa99b2d57ee7bfe6a/ruff-0.13.0.tar.gz", hash = "sha256:5b4b1ee7eb35afae128ab94459b13b2baaed282b1fb0f472a73c82c996c8ae60", size = 5372863, upload-time = "2025-09-10T16:25:37.917Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/ac/fe/6f87b419dbe166fd30a991390221f14c5b68946f389ea07913e1719741e0/ruff-0.13.0-py3-none-linux_armv6l.whl", hash = "sha256:137f3d65d58ee828ae136a12d1dc33d992773d8f7644bc6b82714570f31b2004", size = 12187826, upload-time = "2025-09-10T16:24:39.5Z" },
    { url = "https://files.pythonhosted.org/packages/e4/25/c92296b1fc36d2499e12b74a3fdb230f77af7bdf048fad7b0a62e94ed56a/ruff-0.13.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:21ae48151b66e71fd111b7d79f9ad358814ed58c339631450c66a4be33cc28b9", size = 12933428, upload-time = "2025-09-10T16:24:43.866Z" },
    { url = "https://files.pythonhosted.org/packages/44/cf/40bc7221a949470307d9c35b4ef5810c294e6cfa3caafb57d882731a9f42/ruff-0.13.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:64de45f4ca5441209e41742d527944635a05a6e7c05798904f39c85bafa819e3", size = 12095543, upload-time = "2025-09-10T16:24:46.638Z" },
    { url = "https://files.pythonhosted.org/packages/f1/03/8b5ff2a211efb68c63a1d03d157e924997ada87d01bebffbd13a0f3fcdeb/ruff-0.13.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b2c653ae9b9d46e0ef62fc6fbf5b979bda20a0b1d2b22f8f7eb0cde9f4963b8", size = 12312489, upload-time = "2025-09-10T16:24:49.556Z" },
    { url = "https://files.pythonhosted.org/packages/37/fc/2336ef6d5e9c8d8ea8305c5f91e767d795cd4fc171a6d97ef38a5302dadc/ruff-0.13.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cec632534332062bc9eb5884a267b689085a1afea9801bf94e3ba7498a2d207", size = 11991631, upload-time = "2025-09-10T16:24:53.439Z" },
    { url = "https://files.pythonhosted.org/packages/39/7f/f6d574d100fca83d32637d7f5541bea2f5e473c40020bbc7fc4a4d5b7294/ruff-0.13.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd628101d9f7d122e120ac7c17e0a0f468b19bc925501dbe03c1cb7f5415b24", size = 13720602, upload-time = "2025-09-10T16:24:56.392Z" },
    { url = "https://files.pythonhosted.org/packages/fd/c8/a8a5b81d8729b5d1f663348d11e2a9d65a7a9bd3c399763b1a51c72be1ce/ruff-0.13.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:afe37db8e1466acb173bb2a39ca92df00570e0fd7c94c72d87b51b21bb63efea", size = 14697751, upload-time = "2025-09-10T16:24:59.89Z" },
    { url = "https://files.pythonhosted.org/packages/57/f5/183ec292272ce7ec5e882aea74937f7288e88ecb500198b832c24debc6d3/ruff-0.13.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f96a8d90bb258d7d3358b372905fe7333aaacf6c39e2408b9f8ba181f4b6ef2", size = 14095317, upload-time = "2025-09-10T16:25:03.025Z" },
    { url = "https://files.pythonhosted.org/packages/9f/8d/7f9771c971724701af7926c14dab31754e7b303d127b0d3f01116faef456/ruff-0.13.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b5e3d883e4f924c5298e3f2ee0f3085819c14f68d1e5b6715597681433f153", size = 13144418, upload-time = "2025-09-10T16:25:06.272Z" },
    { url = "https://files.pythonhosted.org/packages/a8/a6/7985ad1778e60922d4bef546688cd8a25822c58873e9ff30189cfe5dc4ab/ruff-0.13.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03447f3d18479df3d24917a92d768a89f873a7181a064858ea90a804a7538991", size = 13370843, upload-time = "2025-09-10T16:25:09.965Z" },
    { url = "https://files.pythonhosted.org/packages/64/1c/bafdd5a7a05a50cc51d9f5711da704942d8dd62df3d8c70c311e98ce9f8a/ruff-0.13.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:fbc6b1934eb1c0033da427c805e27d164bb713f8e273a024a7e86176d7f462cf", size = 13321891, upload-time = "2025-09-10T16:25:12.969Z" },
    { url = "https://files.pythonhosted.org/packages/bc/3e/7817f989cb9725ef7e8d2cee74186bf90555279e119de50c750c4b7a72fe/ruff-0.13.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8ab6a3e03665d39d4a25ee199d207a488724f022db0e1fe4002968abdb8001b", size = 12119119, upload-time = "2025-09-10T16:25:16.621Z" },
    { url = "https://files.pythonhosted.org/packages/58/07/9df080742e8d1080e60c426dce6e96a8faf9a371e2ce22eef662e3839c95/ruff-0.13.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2a5c62f8ccc6dd2fe259917482de7275cecc86141ee10432727c4816235bc41", size = 11961594, upload-time = "2025-09-10T16:25:19.49Z" },
    { url = "https://files.pythonhosted.org/packages/6a/f4/ae1185349197d26a2316840cb4d6c3fba61d4ac36ed728bf0228b222d71f/ruff-0.13.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b7b85ca27aeeb1ab421bc787009831cffe6048faae08ad80867edab9f2760945", size = 12933377, upload-time = "2025-09-10T16:25:22.371Z" },
    { url = "https://files.pythonhosted.org/packages/b6/39/e776c10a3b349fc8209a905bfb327831d7516f6058339a613a8d2aaecacd/ruff-0.13.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:79ea0c44a3032af768cabfd9616e44c24303af49d633b43e3a5096e009ebe823", size = 13418555, upload-time = "2025-09-10T16:25:25.681Z" },
    { url = "https://files.pythonhosted.org/packages/46/09/dca8df3d48e8b3f4202bf20b1658898e74b6442ac835bfe2c1816d926697/ruff-0.13.0-py3-none-win32.whl", hash = "sha256:4e473e8f0e6a04e4113f2e1de12a5039579892329ecc49958424e5568ef4f768", size = 12141613, upload-time = "2025-09-10T16:25:28.664Z" },
    { url = "https://files.pythonhosted.org/packages/61/21/0647eb71ed99b888ad50e44d8ec65d7148babc0e242d531a499a0bbcda5f/ruff-0.13.0-py3-none-win_amd64.whl", hash = "sha256:48e5c25c7a3713eea9ce755995767f4dcd1b0b9599b638b12946e892123d1efb", size = 13258250, upload-time = "2025-09-10T16:25:31.773Z" },
    { url = "https://files.pythonhosted.org/packages/e1/a3/03216a6a86c706df54422612981fb0f9041dbb452c3401501d4a22b942c9/ruff-0.13.0-py3-none-win_arm64.whl", hash = "sha256:ab80525317b1e1d38614addec8ac954f1b3e662de9d59114ecbf771d00cf613e", size = 12312357, upload-time = "2025-09-10T16:25:35.595Z" },
]

[[package]]
name = "types-requests"
version = "2.32.4.20250809"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ed/b0/9355adb86ec84d057fea765e4c49cce592aaf3d5117ce5609a95a7fc3dac/types_requests-2.32.4.20250809.tar.gz", hash = "sha256:d8060de1c8ee599311f56ff58010fb4902f462a1470802cf9f6ed27bc46c4df3", size = 23027, upload-time = "2025-08-09T03:17:10.664Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/2b/6f/ec0012be842b1d888d46884ac5558fd62aeae1f0ec4f7a581433d890d4b5/types_requests-2.32.4.20250809-py3-none-any.whl", hash = "sha256:f73d1832fb519ece02c85b1f09d5f0dd3108938e7d47e7f94bbfa18a6782b163", size = 20644, upload-time = "2025-08-09T03:17:09.716Z" },
]

[[package]]
name = "typing-extensions"
version = "4.15.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
]

[[package]]
name = "typing-inspection"
version = "0.4.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" },
]

[[package]]
name = "urllib3"
version = "2.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
]

[[package]]
name = "win32-setctime"
version = "1.2.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867, upload-time = "2024-12-07T15:28:28.314Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" },
]