commit
29e4f224dc
|
|
@ -35,13 +35,13 @@ def extract_identifiers_from_item(
|
|||
id = None
|
||||
if not title:
|
||||
id = item.get("Id")
|
||||
logger.info(f"{server_type}: Name not found for {id}")
|
||||
logger.debug(f"{server_type}: Name not found for {id}")
|
||||
|
||||
guids = {}
|
||||
if generate_guids:
|
||||
guids = {k.lower(): v for k, v in item.get("ProviderIds", {}).items()}
|
||||
if not guids:
|
||||
logger.info(
|
||||
logger.debug(
|
||||
f"{server_type}: {title if title else id} has no guids",
|
||||
)
|
||||
|
||||
|
|
@ -59,7 +59,7 @@ def extract_identifiers_from_item(
|
|||
)
|
||||
|
||||
if not locations:
|
||||
logger.info(f"{server_type}: {title if title else id} has no locations")
|
||||
logger.debug(f"{server_type}: {title if title else id} has no locations")
|
||||
|
||||
return MediaIdentifiers(
|
||||
title=title,
|
||||
|
|
@ -451,12 +451,19 @@ class JellyfinEmby:
|
|||
return LibraryData(title=library_title)
|
||||
|
||||
def get_watched(
|
||||
self, users: dict[str, str], sync_libraries: list[str]
|
||||
self,
|
||||
users: dict[str, str],
|
||||
sync_libraries: list[str],
|
||||
users_watched: dict[str, UserData] = None,
|
||||
) -> dict[str, UserData]:
|
||||
try:
|
||||
users_watched: dict[str, UserData] = {}
|
||||
if not users_watched:
|
||||
users_watched: dict[str, UserData] = {}
|
||||
|
||||
for user_name, user_id in users.items():
|
||||
if user_name.lower() not in users_watched:
|
||||
users_watched[user_name.lower()] = UserData()
|
||||
|
||||
all_libraries = self.query(f"/Users/{user_id}/Views", "get")
|
||||
if not all_libraries or not isinstance(all_libraries, dict):
|
||||
logger.debug(
|
||||
|
|
@ -465,16 +472,24 @@ class JellyfinEmby:
|
|||
continue
|
||||
|
||||
for library in all_libraries.get("Items", []):
|
||||
if library.get("Name") not in sync_libraries:
|
||||
continue
|
||||
|
||||
library_id = library.get("Id")
|
||||
library_title = library.get("Name")
|
||||
library_type = library.get("CollectionType")
|
||||
|
||||
if not library_id or not library_title or not library_type:
|
||||
logger.debug(
|
||||
f"{self.server_type}: Failed to get library data for {user_name} {library_title}"
|
||||
)
|
||||
continue
|
||||
|
||||
if library_title not in sync_libraries:
|
||||
continue
|
||||
|
||||
if library_title in users_watched:
|
||||
logger.info(
|
||||
f"{self.server_type}: {user_name} {library_title} watched history has already been gathered, skipping"
|
||||
)
|
||||
continue
|
||||
|
||||
# Get watched for user
|
||||
library_data = self.get_user_library_watched(
|
||||
|
|
|
|||
18
src/main.py
18
src/main.py
|
|
@ -18,6 +18,7 @@ from src.functions import (
|
|||
from src.users import setup_users
|
||||
from src.watched import (
|
||||
cleanup_watched,
|
||||
merge_server_watched,
|
||||
)
|
||||
from src.black_white import setup_black_white_lists
|
||||
from src.connection import generate_server_connections
|
||||
|
|
@ -178,6 +179,9 @@ def main_loop(env) -> None:
|
|||
if server_1 == servers[-1]:
|
||||
break
|
||||
|
||||
# Store a copy of server_1_watched so it can be reused without having to regather everyone's watch history every time
|
||||
server_1_watched = None
|
||||
|
||||
# Start server_2 at the next server in the list
|
||||
for server_2 in servers[servers.index(server_1) + 1 :]:
|
||||
# Check if server 1 and server 2 are going to be synced in either direction, skip if not
|
||||
|
|
@ -208,7 +212,9 @@ def main_loop(env) -> None:
|
|||
logger.info(f"Server 2 syncing libraries: {server_2_libraries}")
|
||||
|
||||
logger.info("Creating watched lists", 1)
|
||||
server_1_watched = server_1.get_watched(server_1_users, server_1_libraries)
|
||||
server_1_watched = server_1.get_watched(
|
||||
server_1_users, server_1_libraries, server_1_watched
|
||||
)
|
||||
logger.info("Finished creating watched list server 1")
|
||||
|
||||
server_2_watched = server_2.get_watched(server_2_users, server_2_libraries)
|
||||
|
|
@ -236,6 +242,16 @@ def main_loop(env) -> None:
|
|||
|
||||
if should_sync_server(env, server_2, server_1):
|
||||
logger.info(f"Syncing {server_2.info()} -> {server_1.info()}")
|
||||
|
||||
# Add server_2_watched_filtered to server_1_watched that way the stored version isn't stale for the next server
|
||||
if not dryrun:
|
||||
server_1_watched = merge_server_watched(
|
||||
server_1_watched,
|
||||
server_2_watched_filtered,
|
||||
user_mapping,
|
||||
library_mapping,
|
||||
)
|
||||
|
||||
server_1.update_watched(
|
||||
server_2_watched_filtered,
|
||||
user_mapping,
|
||||
|
|
|
|||
22
src/plex.py
22
src/plex.py
|
|
@ -279,10 +279,14 @@ class Plex:
|
|||
return LibraryData(title=library.title)
|
||||
|
||||
def get_watched(
|
||||
self, users: list[MyPlexUser | MyPlexAccount], sync_libraries: list[str]
|
||||
self,
|
||||
users: list[MyPlexUser | MyPlexAccount],
|
||||
sync_libraries: list[str],
|
||||
users_watched: dict[str, UserData] = None,
|
||||
) -> dict[str, UserData]:
|
||||
try:
|
||||
users_watched: dict[str, UserData] = {}
|
||||
if not users_watched:
|
||||
users_watched: dict[str, UserData] = {}
|
||||
|
||||
for user in users:
|
||||
if self.admin_user == user:
|
||||
|
|
@ -307,18 +311,24 @@ class Plex:
|
|||
if library.title not in sync_libraries:
|
||||
continue
|
||||
|
||||
if user_name not in users_watched:
|
||||
users_watched[user_name] = UserData()
|
||||
|
||||
if library.title in users_watched[user_name].libraries:
|
||||
logger.info(
|
||||
f"Plex: {user_name} {library.title} watched history has already been gathered, skipping"
|
||||
)
|
||||
continue
|
||||
|
||||
library_data = self.get_user_library_watched(
|
||||
user_name, user_plex, library
|
||||
)
|
||||
|
||||
if user_name not in users_watched:
|
||||
users_watched[user_name] = UserData()
|
||||
|
||||
users_watched[user_name].libraries[library.title] = library_data
|
||||
|
||||
return users_watched
|
||||
except Exception as e:
|
||||
logger.error(f"Plex: Failed to get watched, Error: {e}")
|
||||
logger.error(f"Plex: Failed to get users watched, Error: {e}")
|
||||
return {}
|
||||
|
||||
def update_user_watched(
|
||||
|
|
|
|||
105
src/watched.py
105
src/watched.py
|
|
@ -43,6 +43,111 @@ class UserData(BaseModel):
|
|||
libraries: dict[str, LibraryData] = Field(default_factory=dict)
|
||||
|
||||
|
||||
def merge_mediaitem_data(ep1: MediaItem, ep2: MediaItem) -> MediaItem:
    """
    Pick the "more watched" of two matching MediaItems.

    A completed item always wins over an incomplete one; when both share the
    same completion state, the one with the greater playback time wins, with
    ties going to ep1.
    """
    done1 = ep1.status.completed
    done2 = ep2.status.completed
    if done1 and not done2:
        return ep1
    if done2 and not done1:
        return ep2
    # Same completion state on both sides: compare playback position.
    return ep2 if ep2.status.time > ep1.status.time else ep1
|
||||
|
||||
|
||||
def merge_series_data(series1: Series, series2: Series) -> Series:
    """
    Combine the episode lists of two Series objects.

    Episodes present in both (as decided by check_same_identifiers) are
    collapsed via merge_mediaitem_data; episodes only in series2 are appended.
    series1 is never mutated — a deep copy is returned.
    """
    result = copy.deepcopy(series1)
    for candidate in series2.episodes:
        matched = False
        for pos, existing in enumerate(result.episodes):
            if check_same_identifiers(candidate.identifiers, existing.identifiers):
                result.episodes[pos] = merge_mediaitem_data(existing, candidate)
                matched = True
                break
        if not matched:
            result.episodes.append(copy.deepcopy(candidate))
    return result
|
||||
|
||||
|
||||
def merge_library_data(lib1: LibraryData, lib2: LibraryData) -> LibraryData:
    """
    Combine two LibraryData objects.

    Movies matching by identifiers are collapsed with merge_mediaitem_data;
    series matching by identifiers are collapsed with merge_series_data.
    Unmatched entries from lib2 are appended. lib1 is not mutated.
    """
    combined = copy.deepcopy(lib1)

    # Movies: replace a matching entry, otherwise append.
    for incoming_movie in lib2.movies:
        matched = False
        for pos, existing_movie in enumerate(combined.movies):
            if check_same_identifiers(
                incoming_movie.identifiers, existing_movie.identifiers
            ):
                combined.movies[pos] = merge_mediaitem_data(
                    existing_movie, incoming_movie
                )
                matched = True
                break
        if not matched:
            combined.movies.append(copy.deepcopy(incoming_movie))

    # Series: same scheme, but matching entries are merged episode-by-episode.
    for incoming_series in lib2.series:
        matched = False
        for pos, existing_series in enumerate(combined.series):
            if check_same_identifiers(
                existing_series.identifiers, incoming_series.identifiers
            ):
                combined.series[pos] = merge_series_data(
                    existing_series, incoming_series
                )
                matched = True
                break
        if not matched:
            combined.series.append(copy.deepcopy(incoming_series))

    return combined
|
||||
|
||||
|
||||
def merge_user_data(user1: UserData, user2: UserData) -> UserData:
    """
    Combine the libraries of two UserData objects into a new UserData.

    A library present in both is merged via merge_library_data; a library
    only in user2 is deep-copied in. Neither input is mutated.
    """
    libraries = copy.deepcopy(user1.libraries)
    for name, incoming in user2.libraries.items():
        existing = libraries.get(name)
        if existing is None:
            libraries[name] = copy.deepcopy(incoming)
        else:
            libraries[name] = merge_library_data(existing, incoming)
    return UserData(libraries=libraries)
|
||||
|
||||
|
||||
def merge_server_watched(
    watched_list_1: dict[str, UserData],
    watched_list_2: dict[str, UserData],
    user_mapping: dict[str, str] | None = None,
    library_mapping: dict[str, str] | None = None,
) -> dict[str, UserData]:
    """
    Merge two dictionaries of UserData while taking into account possible
    differences in user and library keys via the provided mappings.

    watched_list_1 is the base and is never mutated; entries from
    watched_list_2 are folded into a deep copy of it. Keys from
    watched_list_2 are translated through user_mapping / library_mapping
    before lookup, so both sides of a mapped pair land under the same key.
    """
    merged_watched = copy.deepcopy(watched_list_1)

    for user_2, user_data in watched_list_2.items():
        # Determine matching user key.
        user_key = user_mapping.get(user_2, user_2) if user_mapping else user_2
        if user_key not in merged_watched:
            # Fix: store under the MAPPED key (was user_2). Storing under the
            # unmapped name duplicated the user and left later lookups of
            # user_key missing this data.
            merged_watched[user_key] = copy.deepcopy(user_data)
            continue

        for lib_key, lib_data in user_data.libraries.items():
            mapped_lib_key = (
                library_mapping.get(lib_key, lib_key) if library_mapping else lib_key
            )
            if mapped_lib_key not in merged_watched[user_key].libraries:
                # Fix: store under the MAPPED library key (was lib_key), so a
                # later pass finds and merges into it instead of duplicating.
                merged_watched[user_key].libraries[mapped_lib_key] = copy.deepcopy(
                    lib_data
                )
            else:
                merged_watched[user_key].libraries[mapped_lib_key] = merge_library_data(
                    merged_watched[user_key].libraries[mapped_lib_key],
                    lib_data,
                )

    return merged_watched
|
||||
|
||||
|
||||
def check_same_identifiers(item1: MediaIdentifiers, item2: MediaIdentifiers) -> bool:
|
||||
# Check for duplicate based on file locations:
|
||||
if item1.locations and item2.locations:
|
||||
|
|
|
|||
Loading…
Reference in New Issue